[ 668.198192] env[69796]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=69796) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 668.198613] env[69796]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=69796) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 668.198653] env[69796]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=69796) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 668.198973] env[69796]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs [ 668.299632] env[69796]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=69796) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}} [ 668.309669] env[69796]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=69796) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}} [ 668.353102] env[69796]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative [ 668.913889] env[69796]: INFO nova.virt.driver [None req-c6a74045-e8f8-4947-9420-e72028daa5f1 None None] Loading compute driver 'vmwareapi.VMwareVCDriver' [ 668.984607] env[69796]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.984765] env[69796]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.984883] env[69796]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=69796) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}} [ 671.934864] env[69796]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-8673cd98-341d-4bbd-9542-6b4e1f85e4e3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.952116] env[69796]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=69796) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}} [ 671.952262] env[69796]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-281942d1-1373-4683-a529-961320f943a8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.986565] env[69796]: INFO oslo_vmware.api [-] Successfully established new session; session ID is fde3a. 
[ 671.986744] env[69796]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.002s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.987311] env[69796]: INFO nova.virt.vmwareapi.driver [None req-c6a74045-e8f8-4947-9420-e72028daa5f1 None None] VMware vCenter version: 7.0.3 [ 671.990825] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ee9ec1-6466-40e5-9cd7-a2488d5dc9fb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.009822] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd010cc-7166-4baa-8a01-3277714686c7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.016339] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389c0ee3-0366-441c-976c-71ed309300b6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.023197] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71cf72f4-755a-48f7-b6a7-a1af6a70520d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.036441] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e6c14fe-f0e2-4d0e-b165-2c51ff36ffb1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.042604] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7103eca5-306f-460f-9733-548670932ff4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.073016] env[69796]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-b84e3e8a-c7fe-43fa-bcff-b0f8a686f8ea {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.078802] env[69796]: DEBUG nova.virt.vmwareapi.driver [None req-c6a74045-e8f8-4947-9420-e72028daa5f1 None None] Extension org.openstack.compute already exists. {{(pid=69796) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}} [ 672.081473] env[69796]: INFO nova.compute.provider_config [None req-c6a74045-e8f8-4947-9420-e72028daa5f1 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access. 
[ 672.585323] env[69796]: DEBUG nova.context [None req-c6a74045-e8f8-4947-9420-e72028daa5f1 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),eed28077-d77d-4353-b7e2-195009431c1b(cell1) {{(pid=69796) load_cells /opt/stack/nova/nova/context.py:472}} [ 672.585641] env[69796]: INFO nova.utils [None req-c6a74045-e8f8-4947-9420-e72028daa5f1 None None] The cell worker thread pool MainProcess.cell_worker is initialized [ 672.587747] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.587977] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.588708] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.589173] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Acquiring lock "eed28077-d77d-4353-b7e2-195009431c1b" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.589370] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Lock "eed28077-d77d-4353-b7e2-195009431c1b" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.590480] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Lock "eed28077-d77d-4353-b7e2-195009431c1b" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.611410] env[69796]: INFO dbcounter [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Registered counter for database nova_cell0 [ 672.620177] env[69796]: INFO dbcounter [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Registered counter for database nova_cell1 [ 672.623776] env[69796]: DEBUG oslo_db.sqlalchemy.engines [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69796) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}} [ 672.624475] env[69796]: DEBUG oslo_db.sqlalchemy.engines [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] MySQL server mode set to 
STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69796) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}} [ 672.629376] env[69796]: ERROR nova.db.main.api [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenpool.py", line 87, in _spawn_n_impl [ 672.629376] env[69796]: func(*args, **kwargs) [ 672.629376] env[69796]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_green.py", line 69, in __call__ [ 672.629376] env[69796]: self.work.run() [ 672.629376] env[69796]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_utils.py", line 45, in run [ 672.629376] env[69796]: result = self.fn(*self.args, **self.kwargs) [ 672.629376] env[69796]: File "/opt/stack/nova/nova/utils.py", line 695, in context_wrapper [ 672.629376] env[69796]: return func(*args, **kwargs) [ 672.629376] env[69796]: File "/opt/stack/nova/nova/context.py", line 420, in gather_result [ 672.629376] env[69796]: result = fn(*args, **kwargs) [ 672.629376] env[69796]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 672.629376] env[69796]: return f(*args, **kwargs) [ 672.629376] env[69796]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version [ 672.629376] env[69796]: return db.service_get_minimum_version(context, binaries) [ 672.629376] env[69796]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 672.629376] env[69796]: _check_db_access() [ 672.629376] env[69796]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 672.629376] env[69796]: stacktrace = ''.join(traceback.format_stack()) [ 672.629376] env[69796]: [ 672.630483] env[69796]: ERROR nova.db.main.api [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenpool.py", line 87, in _spawn_n_impl [ 672.630483] env[69796]: func(*args, **kwargs) [ 672.630483] env[69796]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_green.py", line 69, in __call__ [ 672.630483] env[69796]: self.work.run() [ 672.630483] env[69796]: File "/opt/stack/data/venv/lib/python3.10/site-packages/futurist/_utils.py", line 45, in run [ 672.630483] env[69796]: result = self.fn(*self.args, **self.kwargs) [ 672.630483] env[69796]: File "/opt/stack/nova/nova/utils.py", line 695, in context_wrapper [ 672.630483] env[69796]: return func(*args, **kwargs) [ 672.630483] env[69796]: File "/opt/stack/nova/nova/context.py", line 420, in gather_result [ 672.630483] env[69796]: result = fn(*args, **kwargs) [ 672.630483] env[69796]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 672.630483] env[69796]: return f(*args, **kwargs) [ 672.630483] env[69796]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version [ 672.630483] env[69796]: return db.service_get_minimum_version(context, binaries) [ 672.630483] env[69796]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 672.630483] env[69796]: _check_db_access() [ 672.630483] env[69796]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 672.630483] env[69796]: stacktrace = ''.join(traceback.format_stack()) [ 672.630483] env[69796]: [ 672.631094] 
env[69796]: WARNING nova.objects.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000 [ 672.631170] env[69796]: WARNING nova.objects.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Failed to get minimum service version for cell eed28077-d77d-4353-b7e2-195009431c1b [ 672.631606] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Acquiring lock "singleton_lock" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.631767] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Acquired lock "singleton_lock" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.632018] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Releasing lock "singleton_lock" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.632358] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Full set of CONF: {{(pid=69796) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 672.632501] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ******************************************************************************** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 672.632628] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] Configuration options gathered from: {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 672.632764] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 672.632956] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 672.633093] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ================================================================================ {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 672.633306] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] allow_resize_to_same_host = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.633506] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] arq_binding_timeout = 300 {{(pid=69796) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.633645] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] backdoor_port = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.633773] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] backdoor_socket = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.633939] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] block_device_allocate_retries = 60 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.634112] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] block_device_allocate_retries_interval = 3 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.634273] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cell_worker_thread_pool_size = 5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.634474] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cert = self.pem {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.634657] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.634826] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute_monitors = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.634989] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] config_dir = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.635193] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] config_drive_format = iso9660 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.635329] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.635493] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] config_source = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.635661] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] console_host = devstack {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.635827] 
env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] control_exchange = nova {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.635984] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cpu_allocation_ratio = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.636173] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] daemon = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.636342] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] debug = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.636497] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] default_access_ip_network_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.636664] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] default_availability_zone = nova {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.636821] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] default_ephemeral_format = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.636978] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] default_green_pool_size = 1000 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.637234] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.637399] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] default_schedule_zone = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.637556] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] disk_allocation_ratio = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.637714] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] enable_new_services = True 
{{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.637892] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] enabled_apis = ['osapi_compute'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.638066] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] enabled_ssl_apis = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.638228] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] flat_injected = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.638388] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] force_config_drive = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.638545] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] force_raw_images = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.638716] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] graceful_shutdown_timeout = 5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.638870] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] heal_instance_info_cache_interval = -1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.639110] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] host = cpu-1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.639293] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.639458] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] initial_disk_allocation_ratio = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.639619] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] initial_ram_allocation_ratio = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.639835] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.639998] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] instance_build_timeout = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 
672.640186] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] instance_delete_interval = 300 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.640354] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] instance_format = [instance: %(uuid)s] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.640516] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] instance_name_template = instance-%08x {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.640679] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] instance_usage_audit = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.640846] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] instance_usage_audit_period = month {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.641028] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.641243] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] instances_path = /opt/stack/data/nova/instances {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.641348] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] internal_service_availability_zone = internal {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.641532] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] key = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.641695] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] live_migration_retry_count = 30 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.641861] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] log_color = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.642075] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] log_config_append = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.642270] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.642439] env[69796]: DEBUG oslo_service.backend.eventlet.service [None 
req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] log_dir = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.642598] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] log_file = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.642727] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] log_options = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.642885] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] log_rotate_interval = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.643066] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] log_rotate_interval_type = days {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.643237] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] log_rotation_type = none {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.643390] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.643518] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.643690] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.643856] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.643983] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.644159] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] long_rpc_timeout = 1800 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.644320] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] 
max_concurrent_builds = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.644503] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] max_concurrent_live_migrations = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.644729] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] max_concurrent_snapshots = 5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.644952] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] max_local_block_devices = 3 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.645160] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] max_logfile_count = 30 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.645325] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] max_logfile_size_mb = 200 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.645487] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] maximum_instance_delete_attempts = 5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.645659] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] metadata_listen = 0.0.0.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.645829] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] metadata_listen_port = 8775 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.645998] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] metadata_workers = 2 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.646177] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] migrate_max_retries = -1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.646346] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] mkisofs_cmd = genisoimage {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.646552] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] my_block_storage_ip = 10.180.1.21 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.646686] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] my_ip = 10.180.1.21 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.646892] env[69796]: DEBUG 
oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.647066] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] network_allocate_retries = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.647247] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.647414] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] osapi_compute_listen = 0.0.0.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.647578] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] osapi_compute_listen_port = 8774 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.647747] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] osapi_compute_unique_server_name_scope = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.647914] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] osapi_compute_workers = 2 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.648094] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] password_length = 12 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.648259] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] periodic_enable = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.648418] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] periodic_fuzzy_delay = 60 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.648584] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] pointer_model = usbtablet {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.648754] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] preallocate_images = none {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.648972] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] publish_errors = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.649126] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] pybasedir = 
/opt/stack/nova {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.649292] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ram_allocation_ratio = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.649454] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] rate_limit_burst = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.649626] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] rate_limit_except_level = CRITICAL {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.649790] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] rate_limit_interval = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.649945] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] reboot_timeout = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.650127] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] reclaim_instance_interval = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.650287] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] record = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.650455] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] reimage_timeout_per_gb = 60 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.650620] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] report_interval = 120 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.650777] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] rescue_timeout = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.650935] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] reserved_host_cpus = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.651107] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] reserved_host_disk_mb = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.651273] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] reserved_host_memory_mb = 512 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.651454] env[69796]: DEBUG oslo_service.backend.eventlet.service [None 
req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] reserved_huge_pages = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.651628] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] resize_confirm_window = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.651791] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] resize_fs_using_block_device = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.651951] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] resume_guests_state_on_host_boot = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.652133] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.652298] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] rpc_response_timeout = 60 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.652457] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] run_external_periodic_tasks = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.652627] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] running_deleted_instance_action = reap {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.652785] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] running_deleted_instance_poll_interval = 1800 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.652941] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] running_deleted_instance_timeout = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.653163] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] scheduler_instance_sync_interval = 120 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.653279] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] service_down_time = 720 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.653476] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] servicegroup_driver = db {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.653647] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] shell_completion = None 
{{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.653808] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] shelved_offload_time = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.653966] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] shelved_poll_interval = 3600 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.654165] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] shutdown_timeout = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.654390] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] source_is_ipv6 = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 672.654565] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ssl_only = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.125255] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.125619] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] sync_power_state_interval = 600 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.125758] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] sync_power_state_pool_size = 1000 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.125864] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] syslog_log_facility = LOG_USER {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.126044] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] tempdir = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.126227] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] timeout_nbd = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.126406] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] transport_url = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.126577] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] update_resources_interval = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.126744] env[69796]: DEBUG oslo_service.backend.eventlet.service [None 
req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] use_cow_images = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.126938] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] use_journal = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.127113] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] use_json = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.127279] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] use_rootwrap_daemon = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.127440] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] use_stderr = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.127613] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] use_syslog = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.127775] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vcpu_pin_set = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.127947] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plugging_is_fatal = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.128131] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plugging_timeout = 300 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.128304] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] virt_mkfs = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.128469] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] volume_usage_poll_interval = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.128633] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] watch_log_file = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.128838] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] web = /usr/share/spice-html5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 673.128992] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.129196] 
env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.129367] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.129579] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_concurrency.disable_process_locking = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.129917] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.130127] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.130299] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.130470] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.130643] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.130810] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.130990] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.auth_strategy = keystone {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.131173] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.compute_link_prefix = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.131355] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.131560] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] 
api.dhcp_domain = novalocal {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.131739] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.enable_instance_password = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.131907] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.glance_link_prefix = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.132083] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.132259] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.132422] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.instance_list_per_project_cells = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.132584] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.list_records_by_skipping_down_cells = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.132746] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.local_metadata_per_cell = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.132912] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.max_limit = 1000 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.133108] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.metadata_cache_expiration = 15 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.133295] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.neutron_default_tenant_id = default {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.133503] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.response_validation = warn {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.133678] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.use_neutron_default_nets = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.133846] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.vendordata_dynamic_connect_timeout = 
5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.134027] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.134196] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.134391] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.134540] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.vendordata_dynamic_targets = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.134704] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.vendordata_jsonfile_path = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.134884] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.135091] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.backend = dogpile.cache.memcached {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.135264] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.backend_argument = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.135428] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.backend_expiration_time = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.135600] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.config_prefix = cache.oslo {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.135770] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.dead_timeout = 60.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.135934] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.debug_cache_backend = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.136109] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.enable_retry_client = False {{(pid=69796) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.136275] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.enable_socket_keepalive = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.136449] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.enabled = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.136611] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.enforce_fips_mode = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.136772] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.expiration_time = 600 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.136933] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.hashclient_retry_attempts = 2 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.137123] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.hashclient_retry_delay = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.137294] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.memcache_dead_retry = 300 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.137454] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.memcache_password = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.137616] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.137777] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.137938] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.memcache_pool_maxsize = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.138115] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.138281] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.memcache_sasl_enabled = False {{(pid=69796) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.138462] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.138627] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.memcache_socket_timeout = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.138786] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.memcache_username = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.138983] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.proxies = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.139125] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.redis_db = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.139287] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.redis_password = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.139456] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.redis_sentinel_service_name = mymaster {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.139646] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.139822] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.redis_server = localhost:6379 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.139985] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.redis_socket_timeout = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.140157] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.redis_username = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.140321] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.retry_attempts = 2 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.140486] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.retry_delay = 0.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.140648] env[69796]: DEBUG 
oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.socket_keepalive_count = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.140807] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.socket_keepalive_idle = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.140966] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.socket_keepalive_interval = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.141151] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.tls_allowed_ciphers = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.141337] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.tls_cafile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.141535] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.tls_certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.141706] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.tls_enabled = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.141864] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cache.tls_keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.142047] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.auth_section = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.142225] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.auth_type = password {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.142386] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.cafile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.142562] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.catalog_info = volumev3::publicURL {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.142721] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.142882] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.collect_timing = False {{(pid=69796) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.143054] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.cross_az_attach = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.143219] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.debug = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.143416] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.endpoint_template = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.143568] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.http_retries = 3 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.143733] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.insecure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.143892] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.144073] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.os_region_name = RegionOne {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.144241] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.split_loggers = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.144405] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cinder.timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.144576] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.144734] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute.cpu_dedicated_set = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.144892] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute.cpu_shared_set = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.145081] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute.image_type_exclude_list = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.145256] env[69796]: DEBUG 
oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.145420] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute.max_concurrent_disk_ops = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.145586] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute.max_disk_devices_to_attach = -1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.145750] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.145919] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.146096] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute.resource_provider_association_refresh = 300 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.146262] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.146425] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute.shutdown_retry_interval = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.146609] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.146788] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] conductor.workers = 2 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.146968] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] console.allowed_origins = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.147143] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] console.ssl_ciphers = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.147314] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] console.ssl_minimum_version = default {{(pid=69796) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.147482] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] consoleauth.enforce_session_timeout = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.147653] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] consoleauth.token_ttl = 600 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.147819] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.cafile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.147977] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.148156] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.collect_timing = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.148317] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.connect_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.148478] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.connect_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.148641] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.endpoint_override = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.148801] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.insecure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.148969] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.149147] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.max_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.149310] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.min_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.149471] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.region_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.149631] env[69796]: DEBUG oslo_service.backend.eventlet.service [None 
req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.retriable_status_codes = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.149791] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.service_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.149959] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.service_type = accelerator {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.150137] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.split_loggers = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.150297] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.status_code_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.150458] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.status_code_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.150626] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.150807] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.150968] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] cyborg.version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.151153] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.asyncio_connection = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.151340] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.asyncio_slave_connection = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.151553] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.backend = sqlalchemy {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.151734] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.connection = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.151902] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.connection_debug = 0 {{(pid=69796) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.152087] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.connection_parameters = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.152259] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.connection_recycle_time = 3600 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.152424] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.connection_trace = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.152589] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.db_inc_retry_interval = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.152753] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.db_max_retries = 20 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.152916] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.db_max_retry_interval = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.153104] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.db_retry_interval = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.153277] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.max_overflow = 50 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.153475] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.max_pool_size = 5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.153649] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.max_retries = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.153822] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.153986] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.mysql_wsrep_sync_wait = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.154165] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.pool_timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
673.154330] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.retry_interval = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.154492] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.slave_connection = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.154657] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.sqlite_synchronous = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.154819] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] database.use_db_reconnect = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.154989] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.asyncio_connection = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.155163] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.asyncio_slave_connection = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.155335] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.backend = sqlalchemy {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.155504] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.connection = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.155673] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.connection_debug = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.155843] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.connection_parameters = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.156011] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.connection_recycle_time = 3600 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.156186] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.connection_trace = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.156350] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.db_inc_retry_interval = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.156515] env[69796]: DEBUG 
oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.db_max_retries = 20 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.156679] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.db_max_retry_interval = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.156842] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.db_retry_interval = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.157009] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.max_overflow = 50 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.157196] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.max_pool_size = 5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.157361] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.max_retries = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.157530] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.157691] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.157848] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.pool_timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.158024] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.retry_interval = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.158179] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.slave_connection = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.158341] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] api_database.sqlite_synchronous = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.158515] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] devices.enabled_mdev_types = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.158691] env[69796]: DEBUG oslo_service.backend.eventlet.service [None 
req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.158862] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ephemeral_storage_encryption.default_format = luks {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.159065] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ephemeral_storage_encryption.enabled = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.159201] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.159361] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.api_servers = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.159520] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.cafile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.159683] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.159845] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.collect_timing = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.160009] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.connect_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.160176] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.connect_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.160339] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.debug = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.160505] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.default_trusted_certificate_ids = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.160667] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.enable_certificate_validation = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.160828] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] 
glance.enable_rbd_download = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.160988] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.endpoint_override = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.161183] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.insecure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.161371] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.161565] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.max_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.161730] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.min_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.161895] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.num_retries = 3 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.162079] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.rbd_ceph_conf = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.162251] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.rbd_connect_timeout = 5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.162420] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.rbd_pool = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.162601] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.rbd_user = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.162765] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.region_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.162928] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.retriable_status_codes = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.163100] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.service_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.163274] env[69796]: DEBUG 
oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.service_type = image {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.163473] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.split_loggers = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.163641] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.status_code_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.163803] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.status_code_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.163963] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.164166] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.164336] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.verify_glance_signatures = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.164498] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] glance.version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.164669] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] guestfs.debug = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.164839] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.auth_section = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.165010] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.auth_type = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.165198] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.cafile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.165361] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.165524] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.collect_timing = False {{(pid=69796) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.165688] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.connect_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.165844] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.connect_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.166011] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.endpoint_override = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.166181] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.insecure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.166341] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.166504] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.max_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.166664] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.min_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.166823] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.region_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.166981] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.retriable_status_codes = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.167154] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.service_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.167325] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.service_type = shared-file-system {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.167491] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.share_apply_policy_timeout = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.167657] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.split_loggers = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.167816] env[69796]: DEBUG 
oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.status_code_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.167974] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.status_code_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.168147] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.168329] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.168492] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] manila.version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.168665] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] mks.enabled = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.169044] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.169299] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] image_cache.manager_interval = 2400 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.169418] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] image_cache.precache_concurrency = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.169592] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] image_cache.remove_unused_base_images = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.169764] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.169932] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.170124] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] image_cache.subdirectory_name = _base {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.170304] env[69796]: DEBUG 
oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.api_max_retries = 60 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.170469] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.api_retry_interval = 2 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.170632] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.auth_section = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.170795] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.auth_type = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.170956] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.cafile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.171129] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.171297] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.collect_timing = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.171482] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.conductor_group = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.171651] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.connect_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.171813] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.connect_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.171973] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.endpoint_override = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.172154] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.insecure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.172315] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.172478] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.max_version = None {{(pid=69796) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.172638] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.min_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.172805] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.peer_list = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.172964] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.region_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.173153] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.retriable_status_codes = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.173322] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.serial_console_state_timeout = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.173517] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.service_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.173692] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.service_type = baremetal {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.173857] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.shard = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.174034] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.split_loggers = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.174201] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.status_code_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.174363] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.status_code_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.174526] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.174707] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.174868] env[69796]: DEBUG 
oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ironic.version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.175062] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.175240] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] key_manager.fixed_key = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.175424] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.175588] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.barbican_api_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.175755] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.barbican_endpoint = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.175923] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.barbican_endpoint_type = public {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.176091] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.barbican_region_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.176257] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.cafile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.176416] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.176581] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.collect_timing = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.176736] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.insecure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.176890] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.177075] env[69796]: DEBUG oslo_service.backend.eventlet.service [None 
req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.number_of_retries = 60 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.177249] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.retry_delay = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.177415] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.send_service_user_token = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.177578] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.split_loggers = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.177736] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.177899] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.verify_ssl = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.178069] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican.verify_ssl_path = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.178243] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican_service_user.auth_section = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.178409] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican_service_user.auth_type = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.178571] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican_service_user.cafile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.178729] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican_service_user.certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.178892] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican_service_user.collect_timing = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.179065] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican_service_user.insecure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.179264] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] 
barbican_service_user.keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.179415] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican_service_user.split_loggers = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.179543] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] barbican_service_user.timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.179714] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vault.approle_role_id = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.179874] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vault.approle_secret_id = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.180054] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vault.kv_mountpoint = secret {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.180221] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vault.kv_path = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.180387] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vault.kv_version = 2 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.180546] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vault.namespace = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.180706] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vault.root_token_id = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.180865] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vault.ssl_ca_crt_file = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.181050] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vault.timeout = 60.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.181224] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vault.use_ssl = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.181424] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
673.181611] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.cafile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.181777] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.181944] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.collect_timing = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.182117] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.connect_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.182283] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.connect_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.182444] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.endpoint_override = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.182608] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.insecure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.182767] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.182930] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.max_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.183104] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.min_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.183268] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.region_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.183457] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.retriable_status_codes = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.183631] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.service_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.183805] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] 
keystone.service_type = identity {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.183966] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.split_loggers = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.184140] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.status_code_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.184302] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.status_code_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.184464] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.184680] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.184848] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] keystone.version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.185067] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.ceph_mount_options = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.185418] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.185608] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.connection_uri = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.185796] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.cpu_mode = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.185980] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.cpu_model_extra_flags = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.186171] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.cpu_models = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.186349] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.cpu_power_governor_high = performance {{(pid=69796) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.186519] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.cpu_power_governor_low = powersave {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.186691] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.cpu_power_management = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.186866] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.187045] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.device_detach_attempts = 8 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.187217] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.device_detach_timeout = 20 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.187388] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.disk_cachemodes = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.187555] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.disk_prefix = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.187723] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.enabled_perf_events = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.187887] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.file_backed_memory = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.188064] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.gid_maps = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.188230] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.hw_disk_discard = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.188391] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.hw_machine_type = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.188566] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.images_rbd_ceph_conf = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.188729] 
env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.188893] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.189083] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.images_rbd_glance_store_name = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.189261] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.images_rbd_pool = rbd {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.189435] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.images_type = default {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.189627] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.images_volume_group = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.189756] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.inject_key = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.189918] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.inject_partition = -2 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.190094] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.inject_password = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.190261] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.iscsi_iface = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.190425] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.iser_use_multipath = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.190589] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.live_migration_bandwidth = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.190751] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.190916] env[69796]: DEBUG oslo_service.backend.eventlet.service 
[None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.live_migration_downtime = 500 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.191088] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.191254] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.191440] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.live_migration_inbound_addr = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.191620] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.191904] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.live_migration_permit_post_copy = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.192089] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.live_migration_scheme = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.192271] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.live_migration_timeout_action = abort {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.192442] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.live_migration_tunnelled = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.192606] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.live_migration_uri = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.192772] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.live_migration_with_native_tls = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.192933] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.max_queues = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.193127] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.193388] env[69796]: DEBUG 
oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.193558] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.nfs_mount_options = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.193865] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.194059] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.194235] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.num_iser_scan_tries = 5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.194404] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.num_memory_encrypted_guests = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.194591] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.194767] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.num_pcie_ports = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.194936] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.num_volume_scan_tries = 5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.195120] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.pmem_namespaces = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.195285] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.quobyte_client_cfg = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.195594] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.195764] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.rbd_connect_timeout = 5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.195933] env[69796]: DEBUG 
oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.196111] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.196276] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.rbd_secret_uuid = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.196437] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.rbd_user = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.196620] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.196802] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.remote_filesystem_transport = ssh {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.196964] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.rescue_image_id = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.197154] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.rescue_kernel_id = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.197317] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.rescue_ramdisk_id = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.197485] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.197646] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.rx_queue_size = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.197812] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.smbfs_mount_options = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.198119] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.198298] env[69796]: DEBUG oslo_service.backend.eventlet.service [None 
req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.snapshot_compression = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.198465] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.snapshot_image_format = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.198704] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.198876] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.sparse_logical_volumes = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.199052] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.swtpm_enabled = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.199228] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.swtpm_group = tss {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.199400] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.swtpm_user = tss {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.199573] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.sysinfo_serial = unique {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.199736] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.tb_cache_size = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.199888] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.tx_queue_size = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.200065] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.uid_maps = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.200231] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.use_virtio_for_bridges = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.200403] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.virt_type = kvm {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.200575] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.volume_clear = zero 
{{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.200739] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.volume_clear_size = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.200903] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.volume_enforce_multipath = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.201091] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.volume_use_multipath = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.201261] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.vzstorage_cache_path = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.201453] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.201640] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.vzstorage_mount_group = qemu {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.201810] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.vzstorage_mount_opts = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.201983] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.202293] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.202476] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.vzstorage_mount_user = stack {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.202645] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.202894] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.auth_section = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.203126] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.auth_type = password 
{{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.203301] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.cafile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.203494] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.203666] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.collect_timing = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.203829] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.connect_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.203990] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.connect_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.204177] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.default_floating_pool = public {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.204341] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.endpoint_override = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.204509] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.extension_sync_interval = 600 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.204673] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.http_retries = 3 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.204833] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.insecure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.204992] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.205184] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.max_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.205358] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.205518] 
env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.min_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.205689] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.ovs_bridge = br-int {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.205857] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.physnets = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.206038] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.region_name = RegionOne {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.206206] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.retriable_status_codes = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.206379] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.service_metadata_proxy = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.206540] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.service_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.206709] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.service_type = network {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.206871] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.split_loggers = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.207039] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.status_code_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.207202] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.status_code_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.207361] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.207542] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.207702] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 
None None] neutron.version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.207873] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] notifications.bdms_in_notifications = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.208059] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] notifications.default_level = INFO {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.208229] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] notifications.include_share_mapping = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.208407] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] notifications.notification_format = unversioned {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.208583] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] notifications.notify_on_state_change = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.208764] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.208944] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] pci.alias = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.209139] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] pci.device_spec = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.209310] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] pci.report_in_placement = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.209487] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.auth_section = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.209665] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.auth_type = password {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.209849] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.209989] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.cafile = None 
{{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.210161] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.210325] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.collect_timing = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.210485] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.connect_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.210643] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.connect_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.210801] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.default_domain_id = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.210956] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.default_domain_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.211124] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.domain_id = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.211283] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.domain_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.211466] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.endpoint_override = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.211646] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.insecure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.211805] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.211965] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.max_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.212136] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.min_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.212306] 
env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.password = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.212467] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.project_domain_id = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.212673] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.project_domain_name = Default {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.212814] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.project_id = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.212988] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.project_name = service {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.213189] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.region_name = RegionOne {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.213366] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.retriable_status_codes = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.213550] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.service_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.213729] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.service_type = placement {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.213896] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.split_loggers = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.214072] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.status_code_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.214240] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.status_code_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.214407] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.system_scope = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.214564] env[69796]: DEBUG oslo_service.backend.eventlet.service [None 
req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.214724] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.trust_id = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.214882] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.user_domain_id = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.215063] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.user_domain_name = Default {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.215230] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.user_id = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.215405] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.username = nova {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.215591] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.215774] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] placement.version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.215967] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.cores = 20 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.216147] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.count_usage_from_placement = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.216323] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.216494] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.injected_file_content_bytes = 10240 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.216664] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.injected_file_path_length = 255 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.216831] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.injected_files = 5 {{(pid=69796) 
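The [placement] block just dumped (placement.auth_url, auth_type = password, project_name = service, username = nova, region_name = RegionOne, valid_interfaces = ['internal', 'public']) is a standard keystoneauth1 option set. A hedged sketch of how such a group is typically turned into an authenticated client, using keystoneauth1's conf-loading helpers rather than nova's actual wiring; it assumes conf is the fully parsed configuration behind this dump.

from keystoneauth1 import loading as ks_loading
from oslo_config import cfg

conf = cfg.CONF   # in the running service, the fully parsed nova configuration

# Register the standard keystoneauth option set under [placement]
# (a no-op if the service has already registered identical options).
ks_loading.register_auth_conf_options(conf, 'placement')
ks_loading.register_session_conf_options(conf, 'placement')
ks_loading.register_adapter_conf_options(conf, 'placement')

# Build an auth plugin, session and adapter from the values shown in this dump
# (auth_url, username = nova, project_name = service, valid_interfaces, region_name).
auth = ks_loading.load_auth_from_conf_options(conf, 'placement')
session = ks_loading.load_session_from_conf_options(conf, 'placement', auth=auth)
placement = ks_loading.load_adapter_from_conf_options(conf, 'placement', session=session)

resp = placement.get('/resource_providers')   # authenticated call to the placement API

The adapter resolves its endpoint from the service catalog using the service_type = placement, valid_interfaces and region_name values listed above.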
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.216995] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.instances = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.217193] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.key_pairs = 100 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.217362] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.metadata_items = 128 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.217530] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.ram = 51200 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.217697] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.recheck_quota = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.217864] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.server_group_members = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.218041] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.server_groups = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.218256] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.218435] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] quota.unified_limits_resource_strategy = require {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.218609] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.218775] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.218936] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] scheduler.image_metadata_prefilter = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.219111] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=69796) log_opt_values 
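For a concrete reading of the quota values above (quota.instances = 10, quota.cores = 20, quota.ram = 51200 MB per project), here is a small illustrative headroom check; the usage numbers and flavor are invented for the example and have nothing to do with this deployment.

# Quota limits as dumped above; the usage numbers below are made up for the example.
limits = {'instances': 10, 'cores': 20, 'ram': 51200}          # ram in MB
in_use = {'instances': 6, 'cores': 14, 'ram': 24576}

flavor = {'vcpus': 8, 'ram': 8192}   # hypothetical 8-vCPU / 8 GB flavor

def fits(limits, in_use, flavor):
    """Return True if one more server of this flavor stays within quota."""
    return (in_use['instances'] + 1 <= limits['instances']
            and in_use['cores'] + flavor['vcpus'] <= limits['cores']
            and in_use['ram'] + flavor['ram'] <= limits['ram'])

print(fits(limits, in_use, flavor))   # False: 14 + 8 = 22 vCPUs exceeds the 20-core quota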
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.219280] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] scheduler.max_attempts = 3 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.219443] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] scheduler.max_placement_results = 1000 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.219611] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.219771] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] scheduler.query_placement_for_image_type_support = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.219961] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.220110] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] scheduler.workers = 2 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.220289] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.220462] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.220645] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.220815] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.220979] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.221173] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.221340] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.221558] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.221735] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.host_subset_size = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.221901] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.222076] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.222242] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.222424] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.222592] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None 
None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.222757] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.isolated_hosts = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.222921] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.isolated_images = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.223093] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.223257] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.223454] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.223630] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.pci_in_placement = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.223797] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.223961] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.224139] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.224305] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.224471] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.224634] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
673.224796] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.track_instance_changes = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.224974] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.225177] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] metrics.required = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.225346] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] metrics.weight_multiplier = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.225509] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.225677] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] metrics.weight_setting = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.226032] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.226219] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] serial_console.enabled = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.226401] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] serial_console.port_range = 10000:20000 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.226577] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.226746] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.226920] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] serial_console.serialproxy_port = 6083 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.227111] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] service_user.auth_section = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
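The filter_scheduler.*_weight_multiplier values listed above parameterize host weighing: each enabled weigher yields a normalized per-host score, the scheduler multiplies it by the configured multiplier and sums the results, so io_ops_weight_multiplier = -1.0 turns I/O load into a penalty while build_failure_weight_multiplier = 1000000.0 makes recent build failures dominate the ranking. A simplified sketch of that weighted-sum model with made-up scores, not nova's actual weigher classes:

# Per-host normalized scores in [0, 1] from individual weighers (invented numbers).
hosts = {
    'host-a': {'ram': 0.9, 'cpu': 0.7, 'io_ops': 0.2},
    'host-b': {'ram': 0.4, 'cpu': 0.9, 'io_ops': 0.8},
}

# Multipliers as dumped above.
multipliers = {'ram': 1.0, 'cpu': 1.0, 'io_ops': -1.0}

def total_weight(scores):
    # Higher is better; io_ops counts negatively because its multiplier is -1.0.
    return sum(multipliers[name] * value for name, value in scores.items())

ranked = sorted(hosts, key=lambda h: total_weight(hosts[h]), reverse=True)
print(ranked)   # ['host-a', 'host-b']: 1.4 vs 0.5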
673.227293] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] service_user.auth_type = password {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.227458] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] service_user.cafile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.227621] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] service_user.certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.227784] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] service_user.collect_timing = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.227946] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] service_user.insecure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.228119] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] service_user.keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.228292] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] service_user.send_service_user_token = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.228457] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] service_user.split_loggers = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.228620] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] service_user.timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.228792] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.agent_enabled = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.228956] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.enabled = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.229303] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.229513] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.229689] env[69796]: DEBUG oslo_service.backend.eventlet.service [None 
req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.html5proxy_port = 6082 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.229855] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.image_compression = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.230025] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.jpeg_compression = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.230195] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.playback_compression = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.230363] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.require_secure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.230542] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.server_listen = 127.0.0.1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.230713] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.231009] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.231191] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.streaming_mode = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.231386] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] spice.zlib_compression = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.231557] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] upgrade_levels.baseapi = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.231736] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] upgrade_levels.compute = auto {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.231902] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] upgrade_levels.conductor = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.232077] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] upgrade_levels.scheduler = 
None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.232247] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vendordata_dynamic_auth.auth_section = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.232413] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vendordata_dynamic_auth.auth_type = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.232577] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vendordata_dynamic_auth.cafile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.232735] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vendordata_dynamic_auth.certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.232897] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.233080] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vendordata_dynamic_auth.insecure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.233249] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vendordata_dynamic_auth.keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.233440] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.233615] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vendordata_dynamic_auth.timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.233794] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.api_retry_count = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.233957] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.ca_file = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.234146] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.cache_prefix = devstack-image-cache {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.234319] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.cluster_name = testcl1 {{(pid=69796) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.234487] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.connection_pool_size = 10 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.234650] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.console_delay_seconds = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.234817] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.datastore_regex = ^datastore.* {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.235047] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.235230] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.host_password = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.235401] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.host_port = 443 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.235574] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.host_username = administrator@vsphere.local {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.235788] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.insecure = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.235989] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.integration_bridge = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.236180] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.maximum_objects = 100 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.236344] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.pbm_default_policy = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.236510] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.pbm_enabled = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.236668] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.pbm_wsdl_location = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
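Among the [vmware] options above, vmware.datastore_regex = ^datastore.* restricts which datastores on vc1.osci.c.eu-de-1.cloud.sap the driver will consider for images and disks: only datastores whose names match the regex are used. A tiny illustration with invented datastore names:

import re

# vmware.datastore_regex from the dump above: only matching datastores are used.
datastore_regex = re.compile(r'^datastore.*')

# Invented datastore names, purely for illustration.
datastores = ['datastore1', 'datastore2-ssd', 'local-scratch', 'nfs-images']
usable = [name for name in datastores if datastore_regex.match(name)]
print(usable)   # ['datastore1', 'datastore2-ssd']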
673.236835] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.236993] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.serial_port_proxy_uri = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.237181] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.serial_port_service_uri = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.237349] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.task_poll_interval = 0.5 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.237523] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.use_linked_clone = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.237694] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.vnc_keymap = en-us {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.237863] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.vnc_port = 5900 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.238040] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vmware.vnc_port_total = 10000 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.238235] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vnc.auth_schemes = ['none'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.238412] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vnc.enabled = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.238725] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.238909] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.239093] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vnc.novncproxy_port = 6080 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.239286] env[69796]: DEBUG oslo_service.backend.eventlet.service [None 
req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vnc.server_listen = 127.0.0.1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.239467] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.239633] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vnc.vencrypt_ca_certs = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.239793] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vnc.vencrypt_client_cert = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.239953] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vnc.vencrypt_client_key = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.240249] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.240388] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.disable_deep_image_inspection = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.240559] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.240725] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.240890] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.241079] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.disable_rootwrap = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.241254] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.enable_numa_live_migration = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.241427] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.241587] env[69796]: DEBUG 
oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.241748] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.241911] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.libvirt_disable_apic = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.242085] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.242254] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.242418] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.242583] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.242744] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.242905] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.243080] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.243246] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.243433] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.243610] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] 
workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.243799] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.243966] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] wsgi.secure_proxy_ssl_header = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.244148] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] zvm.ca_file = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.244310] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] zvm.cloud_connector_url = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.244787] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.244974] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] zvm.reachable_timeout = 300 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.245182] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.245367] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.245551] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] profiler.connection_string = messaging:// {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.245741] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] profiler.enabled = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.245925] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] profiler.es_doc_type = notification {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.246106] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] profiler.es_scroll_size = 10000 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.246280] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None 
None] profiler.es_scroll_time = 2m {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.246445] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] profiler.filter_error_trace = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.246618] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] profiler.hmac_keys = **** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.246788] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] profiler.sentinel_service_name = mymaster {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.246955] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] profiler.socket_timeout = 0.1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.247136] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] profiler.trace_requests = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.247301] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] profiler.trace_sqlalchemy = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.247481] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] profiler_jaeger.process_tags = {} {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.247665] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] profiler_jaeger.service_name_prefix = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.247879] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] profiler_otlp.service_name_prefix = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.248085] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.248260] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.248428] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.248594] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] 
oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.248760] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.248923] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.249114] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.249291] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.249458] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.249632] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.249795] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.250012] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.250150] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.250365] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.250480] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.250651] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=69796) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.250816] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.250981] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.251166] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.251388] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.251568] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.251739] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.251904] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.252084] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.252250] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.252412] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.252577] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.252740] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.252910] env[69796]: DEBUG 
oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.253099] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.253273] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.ssl = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.253480] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.253664] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.253832] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.254011] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.254184] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.ssl_version = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.254350] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.254537] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.254703] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_notifications.retry = -1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.254880] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.255063] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_messaging_notifications.transport_url = **** {{(pid=69796) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.255243] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.auth_section = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.255410] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.auth_type = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.255571] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.cafile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.255730] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.certfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.255892] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.collect_timing = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.256062] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.connect_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.256228] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.connect_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.256389] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.endpoint_id = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.256561] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.endpoint_interface = publicURL {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.256721] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.endpoint_override = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.256879] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.endpoint_region_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.257060] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.endpoint_service_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.257272] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.endpoint_service_type = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
673.257472] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.insecure = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.257643] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.keyfile = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.257804] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.max_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.257963] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.min_version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.258141] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.region_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.258305] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.retriable_status_codes = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.258466] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.service_name = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.258630] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.service_type = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.258790] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.split_loggers = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.258946] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.status_code_retries = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.259119] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.status_code_retry_delay = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.259281] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.timeout = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.259442] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.valid_interfaces = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.259602] env[69796]: DEBUG oslo_service.backend.eventlet.service [None 
req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_limit.version = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.259769] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_reports.file_event_handler = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.259938] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.260105] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] oslo_reports.log_dir = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.260277] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.260478] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.260595] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.260759] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_linux_bridge_privileged.log_daemon_traceback = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.260925] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.261113] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.261292] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.261499] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.261674] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_ovs_privileged.group = None {{(pid=69796) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.261835] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.261997] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_ovs_privileged.log_daemon_traceback = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.262180] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.262343] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.262501] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] vif_plug_ovs_privileged.user = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.262674] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_linux_bridge.flat_interface = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.262856] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.263039] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.263216] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.263424] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.263609] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.263784] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.263950] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] 
os_vif_linux_bridge.vlan_interface = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.264151] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.264325] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_ovs.isolate_vif = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.264523] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.264700] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.264871] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.265565] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_ovs.ovsdb_interface = native {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.265565] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] os_vif_ovs.per_port_bridge = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.265565] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] privsep_osbrick.capabilities = [21] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.265565] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] privsep_osbrick.group = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.265738] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] privsep_osbrick.helper_command = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.265851] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] privsep_osbrick.log_daemon_traceback = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.266028] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.266199] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] 
privsep_osbrick.thread_pool_size = 8 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.266356] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] privsep_osbrick.user = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.266530] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.266692] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] nova_sys_admin.group = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.266849] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] nova_sys_admin.helper_command = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.267017] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] nova_sys_admin.log_daemon_traceback = False {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.267189] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.267351] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.267509] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] nova_sys_admin.user = None {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 673.267644] env[69796]: DEBUG oslo_service.backend.eventlet.service [None req-74d6e7da-b920-46ef-8184-b48716ce1b18 None None] ******************************************************************************** {{(pid=69796) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 673.268078] env[69796]: INFO nova.service [-] Starting compute node (version 31.0.1) [ 673.772269] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Getting list of instances from cluster (obj){ [ 673.772269] env[69796]: value = "domain-c8" [ 673.772269] env[69796]: _type = "ClusterComputeResource" [ 673.772269] env[69796]: } {{(pid=69796) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 673.773729] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f714127-ec54-4019-9bf5-a305e48c4733 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.783572] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Got total of 0 instances {{(pid=69796) list_instances 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 673.784192] env[69796]: WARNING nova.virt.vmwareapi.driver [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 673.784717] env[69796]: INFO nova.virt.node [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Generated node identity dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 [ 673.784951] env[69796]: INFO nova.virt.node [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Wrote node identity dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 to /opt/stack/data/n-cpu-1/compute_id [ 674.287762] env[69796]: WARNING nova.compute.manager [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Compute nodes ['dc1d576d-f9a3-4db7-b636-fdf2129d2ab3'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 675.295783] env[69796]: INFO nova.compute.manager [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 676.302397] env[69796]: WARNING nova.compute.manager [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 676.302397] env[69796]: DEBUG oslo_concurrency.lockutils [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.302800] env[69796]: DEBUG oslo_concurrency.lockutils [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.302800] env[69796]: DEBUG oslo_concurrency.lockutils [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.302973] env[69796]: DEBUG nova.compute.resource_tracker [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69796) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 676.304037] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04d13b2-9500-419f-810e-41b9eb2f011c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.313348] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d4e9cb-65fb-471b-99bb-82c5005d4184 {{(pid=69796) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.334676] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdc7161-d5d0-48dd-b368-46f8fbd5ea3b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.342873] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46669df7-a72b-4d0e-900a-49dc68924a4e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.373945] env[69796]: DEBUG nova.compute.resource_tracker [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180774MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69796) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 676.374124] env[69796]: DEBUG oslo_concurrency.lockutils [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.374249] env[69796]: DEBUG oslo_concurrency.lockutils [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.876685] env[69796]: WARNING nova.compute.resource_tracker [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] No compute node record for cpu-1:dc1d576d-f9a3-4db7-b636-fdf2129d2ab3: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 could not be found. [ 677.380863] env[69796]: INFO nova.compute.resource_tracker [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 [ 678.889833] env[69796]: DEBUG nova.compute.resource_tracker [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 678.890271] env[69796]: DEBUG nova.compute.resource_tracker [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 679.048613] env[69796]: INFO nova.scheduler.client.report [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] [req-fa55841f-bcdb-4c32-9c17-0d15228337e2] Created resource provider record via placement API for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
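The inventory the resource tracker pushes in the following entries (VCPU, MEMORY_MB and DISK_GB with their allocation ratios) is written through the Placement API's PUT /resource_providers/{uuid}/inventories call; nova performs it via its scheduler report client. A minimal sketch of the equivalent raw request is shown below, assuming a placement endpoint URL and auth token (both placeholders, not values from this log) and reusing the provider UUID and inventory figures logged here.

# Illustrative sketch only: nova's scheduler report client performs the
# equivalent of this request when it writes the inventory logged below.
# PLACEMENT and TOKEN are assumed placeholders, not values from this log.
import requests

PLACEMENT = "http://placement.example/placement"        # assumed endpoint
TOKEN = "gAAAA..."                                       # assumed Keystone token
PROVIDER = "dc1d576d-f9a3-4db7-b636-fdf2129d2ab3"        # provider UUID from the log

headers = {
    "X-Auth-Token": TOKEN,
    "OpenStack-API-Version": "placement 1.39",
    "Content-Type": "application/json",
}

payload = {
    # Must match the provider's current generation (0 for a freshly created provider).
    "resource_provider_generation": 0,
    "inventories": {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 1,
                    "step_size": 1, "allocation_ratio": 1.0},
    },
}

resp = requests.put(f"{PLACEMENT}/resource_providers/{PROVIDER}/inventories",
                    json=payload, headers=headers, timeout=30)
resp.raise_for_status()
# On success placement returns the stored inventories with an incremented
# resource_provider_generation, which is what the "generation from 0 to 1
# during operation: update_inventory" entry below reflects.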
[ 679.064757] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6868dc9-551c-440f-af5a-345cc2ee6f6a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.073013] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e3473e-58d0-4b98-900d-c654e8b82b3b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.104222] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e633c6-1ed8-435f-8243-ea7ec38e15be {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.112097] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a520d5-da53-4580-9686-3b93a23c8b66 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.126078] env[69796]: DEBUG nova.compute.provider_tree [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 679.666678] env[69796]: DEBUG nova.scheduler.client.report [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Updated inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 679.666908] env[69796]: DEBUG nova.compute.provider_tree [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Updating resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 generation from 0 to 1 during operation: update_inventory {{(pid=69796) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 679.667059] env[69796]: DEBUG nova.compute.provider_tree [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 679.719465] env[69796]: DEBUG nova.compute.provider_tree [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Updating resource 
provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 generation from 1 to 2 during operation: update_traits {{(pid=69796) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 680.223894] env[69796]: DEBUG nova.compute.resource_tracker [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69796) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 680.224297] env[69796]: DEBUG oslo_concurrency.lockutils [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.850s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.224338] env[69796]: DEBUG nova.service [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Creating RPC server for service compute {{(pid=69796) start /opt/stack/nova/nova/service.py:177}} [ 680.239674] env[69796]: DEBUG nova.service [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] Join ServiceGroup membership for this service compute {{(pid=69796) start /opt/stack/nova/nova/service.py:194}} [ 680.239905] env[69796]: DEBUG nova.servicegroup.drivers.db [None req-095d8134-9739-4f6c-af46-b035bda2d2a3 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=69796) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 721.057167] env[69796]: INFO nova.utils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] The default thread pool MainProcess.default is initialized [ 721.057915] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquiring lock "47f223c0-12b0-4eda-ab42-81fe8b95afac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.058131] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "47f223c0-12b0-4eda-ab42-81fe8b95afac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.560891] env[69796]: DEBUG nova.compute.manager [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 722.106680] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.108284] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.109406] env[69796]: INFO nova.compute.claims [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 722.135873] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Acquiring lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.135873] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.319964] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquiring lock "38792225-b054-4c08-b3ec-51d46287b0f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.320216] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "38792225-b054-4c08-b3ec-51d46287b0f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.638908] env[69796]: DEBUG nova.compute.manager [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 722.824889] env[69796]: DEBUG nova.compute.manager [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 723.182636] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.228571] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cd848d-7a4f-49dd-a039-3d6621f020ff {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.246347] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc25d73-8061-43d6-a718-59ca08476dbb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.290788] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4938454-4a9e-4c04-9ac1-d116b67b25fe {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.296448] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Acquiring lock "47005af8-11fe-498f-9b67-e0316faeeb8f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.296690] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Lock "47005af8-11fe-498f-9b67-e0316faeeb8f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.306271] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13b9605-e31c-4b6d-b21f-6620a7e08dc4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.325367] env[69796]: DEBUG nova.compute.provider_tree [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.356021] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.799586] env[69796]: DEBUG nova.compute.manager [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 723.833160] env[69796]: DEBUG nova.scheduler.client.report [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 724.341936] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.344336] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.236s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.344421] env[69796]: DEBUG nova.compute.manager [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Start building networks asynchronously for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 724.349530] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.166s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.350744] env[69796]: INFO nova.compute.claims [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.855583] env[69796]: DEBUG nova.compute.utils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 724.857772] env[69796]: DEBUG nova.compute.manager [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Allocating IP information in the background. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 724.857936] env[69796]: DEBUG nova.network.neutron [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 725.364570] env[69796]: DEBUG nova.compute.manager [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Start building block device mappings for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 725.477129] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd8fbef-32e4-4ffe-94d5-af60e3f2ed8a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.486272] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8b215a-27d0-4dc5-8189-a688360d6f64 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.525171] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e325d8ca-2bb4-4fca-ae65-21b84a2b73fc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.537911] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff7f539-8d68-463a-976b-2efae3c17561 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.561287] env[69796]: DEBUG nova.compute.provider_tree [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.604504] env[69796]: DEBUG nova.policy [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9647f914db0a47a29f7d54ee48ab7c76', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '21ad04776655492684df3fc7fabcbdd1', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 726.067358] env[69796]: DEBUG nova.scheduler.client.report [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 726.375037] env[69796]: DEBUG nova.compute.manager [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Start spawning the instance on the hypervisor. 
{{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 726.419236] env[69796]: DEBUG nova.virt.hardware [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 726.419490] env[69796]: DEBUG nova.virt.hardware [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 726.419643] env[69796]: DEBUG nova.virt.hardware [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 726.419819] env[69796]: DEBUG nova.virt.hardware [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 726.419960] env[69796]: DEBUG nova.virt.hardware [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 726.423849] env[69796]: DEBUG nova.virt.hardware [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 726.424157] env[69796]: DEBUG nova.virt.hardware [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 726.424320] env[69796]: DEBUG nova.virt.hardware [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 726.424729] env[69796]: DEBUG 
nova.virt.hardware [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 726.424899] env[69796]: DEBUG nova.virt.hardware [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 726.425086] env[69796]: DEBUG nova.virt.hardware [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 726.426100] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8109be8f-3fff-4569-a1d0-51aad2201936 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.430090] env[69796]: DEBUG nova.network.neutron [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Successfully created port: dcea5761-7cad-4443-a674-5ca2c4994581 {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 726.441442] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c81d7233-257a-4d62-93cd-5cc4bf0c07c7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.463846] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53164f1-1a00-431f-8846-41f25121b219 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.573979] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.225s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.574684] env[69796]: DEBUG nova.compute.manager [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Start building networks asynchronously for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 726.585225] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.231s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.589530] env[69796]: INFO nova.compute.claims [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 727.086973] env[69796]: DEBUG nova.compute.utils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 727.089821] env[69796]: DEBUG nova.compute.manager [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Allocating IP information in the background. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 727.089821] env[69796]: DEBUG nova.network.neutron [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 727.148382] env[69796]: DEBUG nova.policy [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '290ca53412f341f19b03c7e795b6e92b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4d55c128d4cb4661acab17a3f13e0543', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 727.591504] env[69796]: DEBUG nova.compute.manager [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Start building block device mappings for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 727.692136] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fbe206-6a34-4352-90b9-62443d83b23d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.703414] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b93423-c3c7-421e-ab20-f44b496474c6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.746068] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338a687f-92e3-483f-a12e-51d7609bde9f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.751997] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76072a79-e243-47c9-8478-884e5480a161 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.768795] env[69796]: DEBUG nova.compute.provider_tree [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.053034] env[69796]: DEBUG nova.network.neutron [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Successfully created port: 8a8a7e47-3d72-4d40-b819-2d51cd634de6 {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 728.280919] env[69796]: DEBUG nova.scheduler.client.report [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 728.607504] env[69796]: DEBUG nova.compute.manager [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Start spawning the instance on the hypervisor. 
{{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 728.643164] env[69796]: DEBUG nova.virt.hardware [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 728.643535] env[69796]: DEBUG nova.virt.hardware [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.643715] env[69796]: DEBUG nova.virt.hardware [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 728.644117] env[69796]: DEBUG nova.virt.hardware [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.644426] env[69796]: DEBUG nova.virt.hardware [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 728.644657] env[69796]: DEBUG nova.virt.hardware [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 728.645049] env[69796]: DEBUG nova.virt.hardware [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 728.645139] env[69796]: DEBUG nova.virt.hardware [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 728.645310] env[69796]: DEBUG nova.virt.hardware [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 728.645536] env[69796]: DEBUG nova.virt.hardware [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 728.645620] env[69796]: DEBUG nova.virt.hardware [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 728.646520] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a248b4-5818-47e8-8e13-221e27b5a67e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.659782] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d0e082-fab2-4305-8069-1b76a721901a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.784757] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.200s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.785317] env[69796]: DEBUG nova.compute.manager [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Start building networks asynchronously for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 728.791306] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.450s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.792469] env[69796]: INFO nova.compute.claims [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 729.297414] env[69796]: DEBUG nova.compute.utils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 729.304219] env[69796]: DEBUG nova.compute.manager [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Not allocating networking since 'none' was specified. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 729.805433] env[69796]: DEBUG nova.compute.manager [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Start building block device mappings for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 729.927173] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e91cee-e58f-443b-add9-5ee83559e91f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.938069] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b65648b4-47c4-49c2-a062-b3a5cd14f4f3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.976370] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7347dc5e-9aab-47df-8860-d96ea43e24da {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.986336] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1886bc-5e6e-4484-bf18-b800fb127fff {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.005589] env[69796]: DEBUG nova.compute.provider_tree [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 730.212560] env[69796]: DEBUG nova.network.neutron [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Successfully updated port: dcea5761-7cad-4443-a674-5ca2c4994581 {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 730.510622] env[69796]: DEBUG nova.scheduler.client.report [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 730.717852] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquiring lock "refresh_cache-47f223c0-12b0-4eda-ab42-81fe8b95afac" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.718055] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquired lock "refresh_cache-47f223c0-12b0-4eda-ab42-81fe8b95afac" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.720010] env[69796]: DEBUG nova.network.neutron [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc 
tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 730.818305] env[69796]: DEBUG nova.compute.manager [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Start spawning the instance on the hypervisor. {{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 730.861641] env[69796]: DEBUG nova.virt.hardware [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 730.862595] env[69796]: DEBUG nova.virt.hardware [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 730.862595] env[69796]: DEBUG nova.virt.hardware [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 730.862946] env[69796]: DEBUG nova.virt.hardware [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 730.863105] env[69796]: DEBUG nova.virt.hardware [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 730.863309] env[69796]: DEBUG nova.virt.hardware [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 730.863500] env[69796]: DEBUG nova.virt.hardware [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 730.863662] env[69796]: DEBUG nova.virt.hardware [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 730.863815] env[69796]: DEBUG nova.virt.hardware [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 730.864052] env[69796]: DEBUG nova.virt.hardware [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 730.864173] env[69796]: DEBUG nova.virt.hardware [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 730.865550] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7c8d70-c5f9-44c5-87db-6148b221b1cf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.878904] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b5127fe-c244-4d17-9b06-3f877d06b032 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.894309] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Instance VIF info [] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.904098] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 730.904535] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-68442991-41b6-46de-a559-da62810a866a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.920146] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Created folder: OpenStack in parent group-v4. [ 730.920417] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Creating folder: Project (35c2b0628fc741afb4f9d1fbc2ac608e). Parent ref: group-v837766. 
{{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 730.920822] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58d8863f-80d4-493d-a1fd-2a9dbad795c9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.933739] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Created folder: Project (35c2b0628fc741afb4f9d1fbc2ac608e) in parent group-v837766. [ 730.933739] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Creating folder: Instances. Parent ref: group-v837767. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 730.933739] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d065fa7e-8070-42ce-bdfb-4c30a659fb59 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.945642] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Created folder: Instances in parent group-v837767. [ 730.945642] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 730.945828] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 730.946053] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff9af124-04b9-4e01-b8f7-f28760676611 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.970646] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.970646] env[69796]: value = "task-4234214" [ 730.970646] env[69796]: _type = "Task" [ 730.970646] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.979805] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234214, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.019471] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.019471] env[69796]: DEBUG nova.compute.manager [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Start building networks asynchronously for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 731.032848] env[69796]: DEBUG nova.network.neutron [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Successfully updated port: 8a8a7e47-3d72-4d40-b819-2d51cd634de6 {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 731.332792] env[69796]: DEBUG nova.network.neutron [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.485275] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234214, 'name': CreateVM_Task, 'duration_secs': 0.325479} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.485678] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 731.487201] env[69796]: DEBUG oslo_vmware.service [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a152d7-c360-4c23-abda-d576b4b8fdca {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.498465] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.498465] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.499087] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 731.499611] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad2c63da-f757-4376-8b90-02781f35498e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.506468] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 731.506468] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52bf250c-8759-2ef5-6c72-27f33d50742a" [ 731.506468] env[69796]: _type = "Task" [ 731.506468] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.518871] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52bf250c-8759-2ef5-6c72-27f33d50742a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.528416] env[69796]: DEBUG nova.compute.utils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 731.533343] env[69796]: DEBUG nova.compute.manager [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Allocating IP information in the background. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 731.533343] env[69796]: DEBUG nova.network.neutron [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 731.538241] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Acquiring lock "refresh_cache-d0e1a7df-f83f-43c2-a387-d2a378ff31b6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.541831] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Acquired lock "refresh_cache-d0e1a7df-f83f-43c2-a387-d2a378ff31b6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.541831] env[69796]: DEBUG nova.network.neutron [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 731.746618] env[69796]: DEBUG nova.network.neutron [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Updating instance_info_cache with network_info: [{"id": "dcea5761-7cad-4443-a674-5ca2c4994581", "address": "fa:16:3e:bc:5e:f3", "network": {"id": "c97422c4-81ec-4cc9-9b9e-6aa1ed594196", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-239384121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21ad04776655492684df3fc7fabcbdd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8059554c-499f-44b4-be06-29f80ec36b34", "external-id": "nsx-vlan-transportzone-892", "segmentation_id": 892, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapdcea5761-7c", "ovs_interfaceid": "dcea5761-7cad-4443-a674-5ca2c4994581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.915982] env[69796]: DEBUG nova.compute.manager [req-03843a28-47c8-4f21-8c08-3a14f454b633 req-9637984d-0e3a-4f5e-8db2-e4d656a5155f service nova] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Received event network-vif-plugged-dcea5761-7cad-4443-a674-5ca2c4994581 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 731.915982] env[69796]: DEBUG oslo_concurrency.lockutils [req-03843a28-47c8-4f21-8c08-3a14f454b633 req-9637984d-0e3a-4f5e-8db2-e4d656a5155f service nova] Acquiring lock "47f223c0-12b0-4eda-ab42-81fe8b95afac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.916674] env[69796]: DEBUG oslo_concurrency.lockutils [req-03843a28-47c8-4f21-8c08-3a14f454b633 req-9637984d-0e3a-4f5e-8db2-e4d656a5155f service nova] Lock "47f223c0-12b0-4eda-ab42-81fe8b95afac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.916674] env[69796]: DEBUG oslo_concurrency.lockutils [req-03843a28-47c8-4f21-8c08-3a14f454b633 req-9637984d-0e3a-4f5e-8db2-e4d656a5155f service nova] Lock "47f223c0-12b0-4eda-ab42-81fe8b95afac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.916674] env[69796]: DEBUG nova.compute.manager [req-03843a28-47c8-4f21-8c08-3a14f454b633 req-9637984d-0e3a-4f5e-8db2-e4d656a5155f service nova] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] No waiting events found dispatching network-vif-plugged-dcea5761-7cad-4443-a674-5ca2c4994581 {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 731.916815] env[69796]: WARNING nova.compute.manager [req-03843a28-47c8-4f21-8c08-3a14f454b633 req-9637984d-0e3a-4f5e-8db2-e4d656a5155f service nova] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Received unexpected event network-vif-plugged-dcea5761-7cad-4443-a674-5ca2c4994581 for instance with vm_state building and task_state spawning. 
[ 731.920134] env[69796]: DEBUG nova.policy [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8e9022e006204de38e1f2259a5a23d3c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ec450f51915940ceafe12b73ea82353f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 732.020753] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.021013] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 732.021782] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.021782] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.021945] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 732.022534] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27d43762-135b-46c2-bcee-be70ca4c4234 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.033548] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 732.033608] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 732.034802] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395b1a4e-88f0-4650-9022-4c36655435e6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.038715] env[69796]: DEBUG nova.compute.manager [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Start building block device mappings for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 732.050148] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1a9f958-530c-4031-8e10-e965b16484a5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.057768] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 732.057768] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]523d9dd5-8340-c75a-0478-5fdaa023aec6" [ 732.057768] env[69796]: _type = "Task" [ 732.057768] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.068538] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]523d9dd5-8340-c75a-0478-5fdaa023aec6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.121383] env[69796]: DEBUG nova.network.neutron [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 732.249329] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Releasing lock "refresh_cache-47f223c0-12b0-4eda-ab42-81fe8b95afac" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.249800] env[69796]: DEBUG nova.compute.manager [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Instance network_info: |[{"id": "dcea5761-7cad-4443-a674-5ca2c4994581", "address": "fa:16:3e:bc:5e:f3", "network": {"id": "c97422c4-81ec-4cc9-9b9e-6aa1ed594196", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-239384121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21ad04776655492684df3fc7fabcbdd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8059554c-499f-44b4-be06-29f80ec36b34", "external-id": "nsx-vlan-transportzone-892", "segmentation_id": 892, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcea5761-7c", "ovs_interfaceid": "dcea5761-7cad-4443-a674-5ca2c4994581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 732.250283] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:5e:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8059554c-499f-44b4-be06-29f80ec36b34', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dcea5761-7cad-4443-a674-5ca2c4994581', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 732.260065] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Creating folder: Project (21ad04776655492684df3fc7fabcbdd1). Parent ref: group-v837766. 
{{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 732.261757] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3ecd516-1ff4-4cf2-9991-e4ffae747620 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.275612] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Created folder: Project (21ad04776655492684df3fc7fabcbdd1) in parent group-v837766. [ 732.275612] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Creating folder: Instances. Parent ref: group-v837770. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 732.275612] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74220af8-1b98-426e-a64b-445f7c08cd11 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.292252] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Created folder: Instances in parent group-v837770. [ 732.292252] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 732.294928] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 732.296034] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-faca926f-3daf-47a4-8e0f-f7b02bce8ee0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.320527] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 732.320527] env[69796]: value = "task-4234217" [ 732.320527] env[69796]: _type = "Task" [ 732.320527] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.329604] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234217, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.526823] env[69796]: DEBUG nova.network.neutron [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Updating instance_info_cache with network_info: [{"id": "8a8a7e47-3d72-4d40-b819-2d51cd634de6", "address": "fa:16:3e:ea:7b:8a", "network": {"id": "078ddec6-376c-4295-9e59-ab62f27e997f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "21c30a7ac31746b2847e98c1cec76d99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a8a7e47-3d", "ovs_interfaceid": "8a8a7e47-3d72-4d40-b819-2d51cd634de6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.574284] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Preparing fetch location {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 732.574340] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Creating directory with path [datastore2] vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 732.575132] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01a3b345-f8d6-4197-a233-6d1fda2f26de {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.605950] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Created directory with path [datastore2] vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 732.607162] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Fetch image to [datastore2] vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 732.610614] env[69796]: DEBUG 
nova.virt.vmwareapi.vmops [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Downloading image file data 11e211db-44f8-4e34-8fec-8b87ab3fce6f to [datastore2] vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk on the data store datastore2 {{(pid=69796) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 732.611622] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb849a34-b395-4e79-9691-a9d8db99ceed {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.625131] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda56ebb-bfa5-4555-a851-fb3b7554b6ee {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.644318] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0af6bd-54fa-408a-b843-94e97c65ce52 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.694011] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2333fd04-3946-40c7-aca4-845e106c8b4b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.702264] env[69796]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fab9ed7a-e843-4e63-b87a-652da249105e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.797279] env[69796]: DEBUG nova.virt.vmwareapi.images [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Downloading image file data 11e211db-44f8-4e34-8fec-8b87ab3fce6f to the data store datastore2 {{(pid=69796) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 732.833752] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234217, 'name': CreateVM_Task, 'duration_secs': 0.381871} completed successfully. 
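
The image download above streams the Glance bytes straight into the datastore over the ESX host's "/folder" HTTP endpoint (the full URL appears a little further down in the log). A small sketch of how such a URL is composed; the host name and the helper are illustrative:

    # Illustrative: compose the ESX "/folder" URL that the image bytes are
    # written to. The datacenter path and datastore name become query
    # parameters; the datastore-relative path is the URL path.
    from urllib.parse import quote, urlencode


    def datastore_file_url(host, ds_path, dc_path='ha-datacenter',
                           ds_name='datastore2', port=443):
        query = urlencode({'dcPath': dc_path, 'dsName': ds_name})
        return 'https://%s:%d/folder/%s?%s' % (host, port,
                                               quote(ds_path), query)


    print(datastore_file_url(
        'esx-host.example.test',
        'vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/'
        '11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk'))

The SessionManager.AcquireGenericServiceTicket call just before this is what fetches the short-lived ticket the write connection uses to authenticate against the ESX host.
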
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.840033] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 732.853624] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.853624] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.853624] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 732.853624] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb292472-29d0-4454-882a-a3f70a7ff6dd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.864020] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Waiting for the task: (returnval){ [ 732.864020] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52758e47-e9fe-97a7-c1c4-03b608b835f4" [ 732.864020] env[69796]: _type = "Task" [ 732.864020] env[69796]: } to complete. 
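
The image-cache path is guarded with a named oslo.concurrency lock plus an external (file-based) semaphore, which is what the "Acquiring lock" / "Acquired external semaphore" / "Releasing lock" triplets above correspond to. A minimal sketch, assuming a writable lock directory; names and paths are illustrative:

    # Illustrative: serialize work on one cached image across threads and
    # processes with a named lock. external=True adds the file-based
    # semaphore seen in the log.
    from oslo_concurrency import lockutils

    CACHE_LOCK = ('[datastore2] devstack-image-cache_base/'
                  '11e211db-44f8-4e34-8fec-8b87ab3fce6f')


    def with_image_cache_lock(work):
        # Entering and leaving this block produces the Acquiring/Acquired/
        # Releasing records above.
        with lockutils.lock(CACHE_LOCK, external=True,
                            lock_path='/tmp/nova-locks'):
            return work()
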
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.883073] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.883073] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 732.883073] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.033830] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Releasing lock "refresh_cache-d0e1a7df-f83f-43c2-a387-d2a378ff31b6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.033830] env[69796]: DEBUG nova.compute.manager [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Instance network_info: |[{"id": "8a8a7e47-3d72-4d40-b819-2d51cd634de6", "address": "fa:16:3e:ea:7b:8a", "network": {"id": "078ddec6-376c-4295-9e59-ab62f27e997f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "21c30a7ac31746b2847e98c1cec76d99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a8a7e47-3d", "ovs_interfaceid": "8a8a7e47-3d72-4d40-b819-2d51cd634de6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 733.034328] env[69796]: DEBUG oslo_vmware.rw_handles [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = 
https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69796) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 733.036903] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:7b:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ee9f433-666e-4d74-96df-c7c7a6ac7fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a8a7e47-3d72-4d40-b819-2d51cd634de6', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 733.047758] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Creating folder: Project (4d55c128d4cb4661acab17a3f13e0543). Parent ref: group-v837766. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 733.051335] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fbfbc582-a711-4156-ae10-4b98b6315ffc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.054967] env[69796]: DEBUG nova.compute.manager [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Start spawning the instance on the hypervisor. {{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 733.125336] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Acquiring lock "836605ee-50cb-48b0-ba2e-33db3832f8ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.125336] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Lock "836605ee-50cb-48b0-ba2e-33db3832f8ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.132055] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Created folder: Project (4d55c128d4cb4661acab17a3f13e0543) in parent group-v837766. [ 733.132055] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Creating folder: Instances. Parent ref: group-v837773. 
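
build_virtual_machine() does not consume the full neutron network_info entry; it reduces each VIF to the small "VIF info" dict logged above (bridge name, MAC, an OpaqueNetwork reference keyed by the NSX logical-switch id, and the vif_model). A sketch of that reduction with field names mirroring the log entry; the helper is illustrative, not Nova's own code:

    # Illustrative: reduce one neutron network_info VIF entry to the
    # "VIF info" dict that the VMware spawn path needs.
    def vif_info_from_network_info(vif, vif_model='vmxnet3'):
        network = vif['network']
        return {
            'network_name': network['bridge'],            # e.g. 'br-int'
            'mac_address': vif['address'],
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': vif['details']['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],
            'vif_model': vif_model,
        }
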
{{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 733.135571] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3d78b15-49db-44b2-b301-67ee49eb81c8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.154020] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Created folder: Instances in parent group-v837773. [ 733.154020] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 733.154020] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 733.154020] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0b11681-a0d8-4545-a742-5d15550e359c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.180033] env[69796]: DEBUG nova.virt.hardware [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 733.180033] env[69796]: DEBUG nova.virt.hardware [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 733.180033] env[69796]: DEBUG nova.virt.hardware [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 733.180371] env[69796]: DEBUG nova.virt.hardware [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 733.180371] env[69796]: DEBUG nova.virt.hardware [None 
req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 733.180371] env[69796]: DEBUG nova.virt.hardware [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 733.180371] env[69796]: DEBUG nova.virt.hardware [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 733.180691] env[69796]: DEBUG nova.virt.hardware [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 733.180996] env[69796]: DEBUG nova.virt.hardware [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 733.181333] env[69796]: DEBUG nova.virt.hardware [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 733.181637] env[69796]: DEBUG nova.virt.hardware [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 733.183349] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276896f0-160f-4b13-a3f7-a41ea671b1cd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.199130] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 733.199130] env[69796]: value = "task-4234220" [ 733.199130] env[69796]: _type = "Task" [ 733.199130] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.211434] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b997cf-7ca2-43c1-a497-14d27d3324f9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.223628] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234220, 'name': CreateVM_Task} progress is 0%. 
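
The nova.virt.hardware records above show the topology search for the 1-vCPU m1.nano flavor: with no flavor or image preference (0:0:0) and effectively unlimited maxima (65536), every factorization of the vCPU count is a candidate, which for one vCPU is just 1 socket x 1 core x 1 thread. A simplified stand-in for that enumeration (not Nova's exact algorithm):

    # Simplified sketch: enumerate (sockets, cores, threads) factorizations
    # of the vCPU count within the given maxima.
    import collections

    Topology = collections.namedtuple('Topology', 'sockets cores threads')


    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    found.append(Topology(sockets, cores, threads))
        return found


    print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]
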
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.510405] env[69796]: DEBUG nova.network.neutron [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Successfully created port: 6196c31c-6907-4695-91cf-2de0c3cac58f {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 733.630354] env[69796]: DEBUG nova.compute.manager [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 733.723355] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234220, 'name': CreateVM_Task, 'duration_secs': 0.397643} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.725783] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 733.728982] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.729780] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.729780] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 733.730353] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ca24311-ac30-4ed5-a2fe-f0bbd1ff1b0b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.744228] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Waiting for the task: (returnval){ [ 733.744228] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5225139d-ac7b-d58d-500b-feae8e72366f" [ 733.744228] env[69796]: _type = "Task" [ 733.744228] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.756837] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5225139d-ac7b-d58d-500b-feae8e72366f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.775942] env[69796]: DEBUG oslo_vmware.rw_handles [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Completed reading data from the image iterator. {{(pid=69796) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 733.775942] env[69796]: DEBUG oslo_vmware.rw_handles [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69796) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 733.903192] env[69796]: DEBUG nova.compute.manager [req-30371be7-6352-4f90-b1cb-48ba3897dcb6 req-76b567e3-bbf1-47a7-8086-952232648249 service nova] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Received event network-vif-plugged-8a8a7e47-3d72-4d40-b819-2d51cd634de6 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 733.903417] env[69796]: DEBUG oslo_concurrency.lockutils [req-30371be7-6352-4f90-b1cb-48ba3897dcb6 req-76b567e3-bbf1-47a7-8086-952232648249 service nova] Acquiring lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.903623] env[69796]: DEBUG oslo_concurrency.lockutils [req-30371be7-6352-4f90-b1cb-48ba3897dcb6 req-76b567e3-bbf1-47a7-8086-952232648249 service nova] Lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 733.903791] env[69796]: DEBUG oslo_concurrency.lockutils [req-30371be7-6352-4f90-b1cb-48ba3897dcb6 req-76b567e3-bbf1-47a7-8086-952232648249 service nova] Lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.903948] env[69796]: DEBUG nova.compute.manager [req-30371be7-6352-4f90-b1cb-48ba3897dcb6 req-76b567e3-bbf1-47a7-8086-952232648249 service nova] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] No waiting events found dispatching network-vif-plugged-8a8a7e47-3d72-4d40-b819-2d51cd634de6 {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 733.904193] env[69796]: WARNING nova.compute.manager [req-30371be7-6352-4f90-b1cb-48ba3897dcb6 req-76b567e3-bbf1-47a7-8086-952232648249 service nova] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] 
Received unexpected event network-vif-plugged-8a8a7e47-3d72-4d40-b819-2d51cd634de6 for instance with vm_state building and task_state spawning. [ 733.928011] env[69796]: DEBUG nova.virt.vmwareapi.images [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Downloaded image file data 11e211db-44f8-4e34-8fec-8b87ab3fce6f to vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk on the data store datastore2 {{(pid=69796) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 733.929900] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Caching image {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 733.931412] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Copying Virtual Disk [datastore2] vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk to [datastore2] vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 733.931412] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dae0c5db-1255-4a04-b37c-fe6c2db02ba0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.942486] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 733.942486] env[69796]: value = "task-4234221" [ 733.942486] env[69796]: _type = "Task" [ 733.942486] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.954702] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234221, 'name': CopyVirtualDisk_Task} progress is 0%. 
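
Taken together, the records for instance 38792225 show the whole cache-population sequence for image 11e211db-...: download into a per-request vmware_temp directory, copy the sparse upload into a flat <image-id>.vmdk, delete the temporary upload, then move the directory into devstack-image-cache_base, where the SearchDatastore_Task checks issued by the other builds find it. The same sequence written out as the datastore operations and paths involved (purely illustrative):

    # Illustrative summary of the cache-population steps observed above.
    def cache_population_steps(ds, image_id, tmp_id):
        tmp_dir = 'vmware_temp/%s/%s' % (tmp_id, image_id)
        cache_dir = 'devstack-image-cache_base/%s' % image_id
        return [
            ('MakeDirectory',            '[%s] %s' % (ds, tmp_dir)),
            ('HTTP write (rw_handles)',  '[%s] %s/tmp-sparse.vmdk' % (ds, tmp_dir)),
            ('CopyVirtualDisk_Task',     '[%s] %s/%s.vmdk' % (ds, tmp_dir, image_id)),
            ('DeleteDatastoreFile_Task', '[%s] %s/tmp-sparse.vmdk' % (ds, tmp_dir)),
            ('MoveDatastoreFile_Task',   '[%s] %s' % (ds, cache_dir)),
        ]


    for op, path in cache_population_steps(
            'datastore2', '11e211db-44f8-4e34-8fec-8b87ab3fce6f',
            '6fb49971-7970-4e44-87bd-853106a3b946'):
        print('%-26s %s' % (op, path))
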
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.163916] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.164342] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.166732] env[69796]: INFO nova.compute.claims [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 734.258569] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.258805] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 734.259100] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.454169] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234221, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.459182] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquiring lock "a4a16667-cd00-4850-9389-0bd57c7efd74" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.459450] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "a4a16667-cd00-4850-9389-0bd57c7efd74" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.958724] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234221, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.689261} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.959088] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Copied Virtual Disk [datastore2] vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk to [datastore2] vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 734.959285] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Deleting the datastore file [datastore2] vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 734.959557] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-001e90c2-2216-444f-87d8-6b4d8583bd7d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.962591] env[69796]: DEBUG nova.compute.manager [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 734.972735] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 734.972735] env[69796]: value = "task-4234222" [ 734.972735] env[69796]: _type = "Task" [ 734.972735] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.984240] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234222, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.242431] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.242736] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.246034] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.246034] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.246034] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.246034] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.246034] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._sync_power_states {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.339808] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b1059a-93d1-44a8-9942-59c18063cb8e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.353939] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5048adbf-de63-4865-a5b4-35ffc909401f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.403091] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f629f5-e565-4bc2-8706-12b6534ebe4b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.413294] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14757ef3-7fa4-4cb5-80a0-9a3a0a8dc09b {{(pid=69796) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.432741] env[69796]: DEBUG nova.compute.provider_tree [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.486291] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234222, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02882} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.486291] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 735.486291] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Moving file from [datastore2] vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946/11e211db-44f8-4e34-8fec-8b87ab3fce6f to [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f. {{(pid=69796) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 735.486291] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-18d7a447-5cf2-4d37-9402-05a63370c023 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.493735] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.503744] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 735.503744] env[69796]: value = "task-4234223" [ 735.503744] env[69796]: _type = "Task" [ 735.503744] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.515556] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234223, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.530887] env[69796]: DEBUG nova.compute.manager [req-40824041-fdda-49cc-841f-47f8b93b3a08 req-777557dc-0fd1-4d5e-a8dc-bbd939d2b78d service nova] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Received event network-changed-dcea5761-7cad-4443-a674-5ca2c4994581 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 735.531102] env[69796]: DEBUG nova.compute.manager [req-40824041-fdda-49cc-841f-47f8b93b3a08 req-777557dc-0fd1-4d5e-a8dc-bbd939d2b78d service nova] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Refreshing instance network info cache due to event network-changed-dcea5761-7cad-4443-a674-5ca2c4994581. {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 735.531310] env[69796]: DEBUG oslo_concurrency.lockutils [req-40824041-fdda-49cc-841f-47f8b93b3a08 req-777557dc-0fd1-4d5e-a8dc-bbd939d2b78d service nova] Acquiring lock "refresh_cache-47f223c0-12b0-4eda-ab42-81fe8b95afac" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.531444] env[69796]: DEBUG oslo_concurrency.lockutils [req-40824041-fdda-49cc-841f-47f8b93b3a08 req-777557dc-0fd1-4d5e-a8dc-bbd939d2b78d service nova] Acquired lock "refresh_cache-47f223c0-12b0-4eda-ab42-81fe8b95afac" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.531599] env[69796]: DEBUG nova.network.neutron [req-40824041-fdda-49cc-841f-47f8b93b3a08 req-777557dc-0fd1-4d5e-a8dc-bbd939d2b78d service nova] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Refreshing network info cache for port dcea5761-7cad-4443-a674-5ca2c4994581 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 735.750642] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Getting list of instances from cluster (obj){ [ 735.750642] env[69796]: value = "domain-c8" [ 735.750642] env[69796]: _type = "ClusterComputeResource" [ 735.750642] env[69796]: } {{(pid=69796) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 735.751740] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab066aa0-7205-4a2c-9a9b-5ca338b1afcd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.768265] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Got total of 3 instances {{(pid=69796) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 735.768489] env[69796]: WARNING nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] While synchronizing instance power states, found 5 instances in the database and 3 instances on the hypervisor. 
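
The _sync_power_states periodic task above lists the cluster's VMs (3) and compares that with the instance records in the database (5; the two extra are instances that are still building at this point in the log), logs the mismatch as a warning, and then queues a per-instance sync, which is what the "Triggering sync for uuid ..." records that follow are. A reduced sketch of that comparison; the data and logger setup are illustrative:

    # Illustrative sketch of the database-vs-hypervisor comparison done by
    # the _sync_power_states periodic task.
    import logging

    LOG = logging.getLogger(__name__)


    def sync_power_states(db_instance_uuids, driver_instance_uuids):
        if len(db_instance_uuids) != len(driver_instance_uuids):
            LOG.warning('While synchronizing instance power states, found '
                        '%d instances in the database and %d instances on '
                        'the hypervisor.',
                        len(db_instance_uuids), len(driver_instance_uuids))
        for uuid in db_instance_uuids:
            LOG.debug('Triggering sync for uuid %s', uuid)
            # The real code then takes a per-instance lock and reconciles
            # the driver-reported power state with the DB record.
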
[ 735.769432] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid 47f223c0-12b0-4eda-ab42-81fe8b95afac {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 735.769721] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid d0e1a7df-f83f-43c2-a387-d2a378ff31b6 {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 735.769972] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid 38792225-b054-4c08-b3ec-51d46287b0f9 {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 735.770197] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid 47005af8-11fe-498f-9b67-e0316faeeb8f {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 735.771442] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid 836605ee-50cb-48b0-ba2e-33db3832f8ba {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 735.771814] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "47f223c0-12b0-4eda-ab42-81fe8b95afac" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.772101] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.772326] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "38792225-b054-4c08-b3ec-51d46287b0f9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.772534] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "47005af8-11fe-498f-9b67-e0316faeeb8f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.772729] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "836605ee-50cb-48b0-ba2e-33db3832f8ba" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.772903] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.773143] env[69796]: DEBUG nova.compute.manager [None 
req-b293c5af-74de-4a8a-9e41-72034570fcce None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69796) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 735.773306] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager.update_available_resource {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 735.939353] env[69796]: DEBUG nova.scheduler.client.report [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 736.017872] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234223, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.036134} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.018341] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] File moved {{(pid=69796) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 736.018341] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Cleaning up location [datastore2] vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946 {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 736.018716] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Deleting the datastore file [datastore2] vmware_temp/6fb49971-7970-4e44-87bd-853106a3b946 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 736.018716] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1aec9972-5a24-4c60-b5c0-8517c8694cdb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.031040] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 736.031040] env[69796]: value = "task-4234224" [ 736.031040] env[69796]: _type = "Task" [ 736.031040] env[69796]: } to complete. 
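
The inventory dict reported for provider dc1d576d-... maps directly onto placement's capacity model: the schedulable amount of each resource class is (total - reserved) * allocation_ratio, so the 48 physical VCPUs with a 4.0 ratio schedule as 192. A small worked example using the numbers from the log:

    # Usable capacity per resource class, from the inventory logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print('%-9s usable=%s' % (rc, usable))
    # VCPU      usable=192.0
    # MEMORY_MB usable=196078.0
    # DISK_GB   usable=400.0
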
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.044129] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234224, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.276863] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.446129] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.282s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.446885] env[69796]: DEBUG nova.compute.manager [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Start building networks asynchronously for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 736.450625] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.957s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.452855] env[69796]: INFO nova.compute.claims [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 736.552284] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234224, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.027895} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.552881] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 736.555095] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2ecb5a3-aefd-401b-b616-16cb76a6d2f4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.564695] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 736.564695] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52138b8b-70e1-5518-83e1-ecf43b7de484" [ 736.564695] env[69796]: _type = "Task" [ 736.564695] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.582296] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52138b8b-70e1-5518-83e1-ecf43b7de484, 'name': SearchDatastore_Task, 'duration_secs': 0.011201} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.582296] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.582296] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 38792225-b054-4c08-b3ec-51d46287b0f9/38792225-b054-4c08-b3ec-51d46287b0f9.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 736.582628] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.582834] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 736.584180] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with 
opID=oslo.vmware-b167a399-b3c0-4645-9817-c0958cc35a0c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.589233] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-643d323b-c595-442b-9b34-a33693467e25 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.601767] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 736.601767] env[69796]: value = "task-4234225" [ 736.601767] env[69796]: _type = "Task" [ 736.601767] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.602444] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 736.603051] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 736.611145] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37986adc-ca3f-4dfa-877d-22946cc54849 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.623721] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234225, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.623721] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Waiting for the task: (returnval){ [ 736.623721] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52a58b5e-2e4d-62e2-5c88-b3d5d6499e7f" [ 736.623721] env[69796]: _type = "Task" [ 736.623721] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.632508] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52a58b5e-2e4d-62e2-5c88-b3d5d6499e7f, 'name': SearchDatastore_Task} progress is 0%. 
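
Before reusing the cache, vmops makes sure the devstack-image-cache_base directory exists (the "Creating directory ... Created directory ... Folder [datastore2] devstack-image-cache_base created" records). A loose sketch of that idempotent mkdir via FileManager.MakeDirectory, assuming a session and datacenter ref obtained elsewhere; the exception handling expresses the usual "already exists is fine" behaviour and is illustrative rather than a copy of ds_util.mkdir:

    # Illustrative: create a datastore directory, tolerating the case
    # where it already exists.
    from oslo_vmware import exceptions as vexc


    def mkdir_if_missing(session, dc_ref, ds_path):
        file_manager = session.vim.service_content.fileManager
        try:
            session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                               name=ds_path, datacenter=dc_ref,
                               createParentDirectories=True)
        except vexc.FileAlreadyExistsException:
            pass  # already present -- nothing to do


    # e.g. mkdir_if_missing(session, dc_ref,
    #                       '[datastore2] devstack-image-cache_base')
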
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.675114] env[69796]: DEBUG oslo_concurrency.lockutils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Acquiring lock "7f37f6c9-adba-4292-9d47-c455f77e539f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.675114] env[69796]: DEBUG oslo_concurrency.lockutils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lock "7f37f6c9-adba-4292-9d47-c455f77e539f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.967775] env[69796]: DEBUG nova.compute.utils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 736.967775] env[69796]: DEBUG nova.compute.manager [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Allocating IP information in the background. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 736.967775] env[69796]: DEBUG nova.network.neutron [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 737.119120] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234225, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.128409] env[69796]: DEBUG nova.network.neutron [req-40824041-fdda-49cc-841f-47f8b93b3a08 req-777557dc-0fd1-4d5e-a8dc-bbd939d2b78d service nova] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Updated VIF entry in instance network info cache for port dcea5761-7cad-4443-a674-5ca2c4994581. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 737.128952] env[69796]: DEBUG nova.network.neutron [req-40824041-fdda-49cc-841f-47f8b93b3a08 req-777557dc-0fd1-4d5e-a8dc-bbd939d2b78d service nova] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Updating instance_info_cache with network_info: [{"id": "dcea5761-7cad-4443-a674-5ca2c4994581", "address": "fa:16:3e:bc:5e:f3", "network": {"id": "c97422c4-81ec-4cc9-9b9e-6aa1ed594196", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-239384121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "21ad04776655492684df3fc7fabcbdd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8059554c-499f-44b4-be06-29f80ec36b34", "external-id": "nsx-vlan-transportzone-892", "segmentation_id": 892, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcea5761-7c", "ovs_interfaceid": "dcea5761-7cad-4443-a674-5ca2c4994581", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.139652] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52a58b5e-2e4d-62e2-5c88-b3d5d6499e7f, 'name': SearchDatastore_Task, 'duration_secs': 0.018902} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.142021] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42fff5ab-6a1c-44dc-baf7-00e6a48cfe42 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.150318] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Waiting for the task: (returnval){ [ 737.150318] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]526c8ce1-aa52-1d32-8a83-bb65520c7d8d" [ 737.150318] env[69796]: _type = "Task" [ 737.150318] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.166043] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]526c8ce1-aa52-1d32-8a83-bb65520c7d8d, 'name': SearchDatastore_Task} progress is 0%. 
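The instance_info_cache update above carries the full network_info blob for port dcea5761-7cad-4443-a674-5ca2c4994581. To make the shape of that structure easier to read, here is a small sketch that walks the same layout and prints the port id, MAC, device name, and fixed IPs; the data is trimmed from the logged entry and the helper is illustrative, not Nova code.

```python
# Minimal walk over a network_info list shaped like the cache entry logged above.
# Only a subset of the logged fields is reproduced here.
network_info = [{
    "id": "dcea5761-7cad-4443-a674-5ca2c4994581",
    "address": "fa:16:3e:bc:5e:f3",
    "type": "ovs",
    "devname": "tapdcea5761-7c",
    "network": {
        "id": "c97422c4-81ec-4cc9-9b9e-6aa1ed594196",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.3", "type": "fixed"}],
        }],
    },
}]

def summarize(nw_info):
    """Print one line per VIF: port id, MAC, device name, and fixed IPs."""
    for vif in nw_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        print(f'{vif["id"]} {vif["address"]} {vif["devname"]} ips={ips}')

summarize(network_info)
```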
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.178099] env[69796]: DEBUG nova.compute.manager [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 737.318828] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Acquiring lock "8b103adc-9903-406f-8fd1-e193e00cde11" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.319256] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Lock "8b103adc-9903-406f-8fd1-e193e00cde11" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.456882] env[69796]: DEBUG nova.policy [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'be7216f2cb2b4b25bdbb1dad6cb6b5fc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '54a54b9449f347b99d71c1bc2029cbeb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 737.472591] env[69796]: DEBUG nova.compute.manager [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Start building block device mappings for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 737.623752] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234225, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.663049} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.627192] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 38792225-b054-4c08-b3ec-51d46287b0f9/38792225-b054-4c08-b3ec-51d46287b0f9.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 737.627429] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 737.627863] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-870f1728-6a6b-4536-948a-8fdb107e248d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.632368] env[69796]: DEBUG oslo_concurrency.lockutils [req-40824041-fdda-49cc-841f-47f8b93b3a08 req-777557dc-0fd1-4d5e-a8dc-bbd939d2b78d service nova] Releasing lock "refresh_cache-47f223c0-12b0-4eda-ab42-81fe8b95afac" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.636465] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 737.636465] env[69796]: value = "task-4234226" [ 737.636465] env[69796]: _type = "Task" [ 737.636465] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.658517] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234226, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.671552] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]526c8ce1-aa52-1d32-8a83-bb65520c7d8d, 'name': SearchDatastore_Task, 'duration_secs': 0.053136} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.671818] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.672097] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 47f223c0-12b0-4eda-ab42-81fe8b95afac/47f223c0-12b0-4eda-ab42-81fe8b95afac.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 737.672385] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.672571] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.672784] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab8b1cb6-fbd9-4197-92c9-505702df8b86 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.675288] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-189a37ad-0d1d-4498-961e-0d96edcec3f3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.683054] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5a08679-a332-42f0-951f-7133797342b8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.690214] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 737.690656] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Folder [datastore2] devstack-image-cache_base created. 
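Several requests above take turns acquiring and releasing a lock named after the cached image path ([datastore2] devstack-image-cache_base/.../11e211db-....vmdk) before copying it into an instance directory, so concurrent spawns serialize on the shared cache entry. The sketch below shows that serialize-by-resource-name idea with plain threading locks; it is only an illustration of the pattern, not oslo.concurrency's lockutils.

```python
import threading
from collections import defaultdict

# One lock per resource name (here: the cached VMDK path), created on demand.
_locks = defaultdict(threading.Lock)
_registry_guard = threading.Lock()

def lock_for(name):
    with _registry_guard:
        return _locks[name]

def copy_from_cache(cache_vmdk, target_vmdk):
    """Serialize all copies that read the same cached image file."""
    lock = lock_for(cache_vmdk)
    with lock:  # analogous to the Acquired/Releasing lock lines in the log
        print(f"copying {cache_vmdk} -> {target_vmdk}")
        # ... the actual CopyVirtualDisk_Task would run here ...

# Illustrative (shortened) paths, not the full names from the log.
copy_from_cache(
    "[datastore2] devstack-image-cache_base/11e211db.vmdk",
    "[datastore2] 47f223c0/47f223c0.vmdk",
)
```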
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 737.691835] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Waiting for the task: (returnval){ [ 737.691835] env[69796]: value = "task-4234227" [ 737.691835] env[69796]: _type = "Task" [ 737.691835] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.695951] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48ffb608-1454-48b5-bb3b-2c43bce3bf67 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.715376] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234227, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.718446] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76675e52-b1a6-4396-a4bf-27679516798c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.722317] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Waiting for the task: (returnval){ [ 737.722317] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f81ddc-a9b8-6a6c-a5ef-60a7a8a475ff" [ 737.722317] env[69796]: _type = "Task" [ 737.722317] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.723613] env[69796]: DEBUG oslo_concurrency.lockutils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.756538] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3d5b09-e022-45f8-ad31-97e588c160dc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.763044] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f81ddc-a9b8-6a6c-a5ef-60a7a8a475ff, 'name': SearchDatastore_Task, 'duration_secs': 0.014896} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.764305] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac1a2515-350d-4690-ad3b-fdd8ee9853d8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.770608] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865c2ef5-9dc5-4be4-99e3-91c39bc4e899 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.775737] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Waiting for the task: (returnval){ [ 737.775737] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]527c5281-2fb9-6713-5db2-2b588ad79d80" [ 737.775737] env[69796]: _type = "Task" [ 737.775737] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.789865] env[69796]: DEBUG nova.compute.provider_tree [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.800540] env[69796]: DEBUG nova.compute.manager [req-5c4d249e-7dc2-48c6-9310-e6b229ccd218 req-8cf0d6c0-f9a1-4c87-b8c4-f05bcf478a87 service nova] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Received event network-changed-8a8a7e47-3d72-4d40-b819-2d51cd634de6 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 737.800888] env[69796]: DEBUG nova.compute.manager [req-5c4d249e-7dc2-48c6-9310-e6b229ccd218 req-8cf0d6c0-f9a1-4c87-b8c4-f05bcf478a87 service nova] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Refreshing instance network info cache due to event network-changed-8a8a7e47-3d72-4d40-b819-2d51cd634de6. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 737.800962] env[69796]: DEBUG oslo_concurrency.lockutils [req-5c4d249e-7dc2-48c6-9310-e6b229ccd218 req-8cf0d6c0-f9a1-4c87-b8c4-f05bcf478a87 service nova] Acquiring lock "refresh_cache-d0e1a7df-f83f-43c2-a387-d2a378ff31b6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.801075] env[69796]: DEBUG oslo_concurrency.lockutils [req-5c4d249e-7dc2-48c6-9310-e6b229ccd218 req-8cf0d6c0-f9a1-4c87-b8c4-f05bcf478a87 service nova] Acquired lock "refresh_cache-d0e1a7df-f83f-43c2-a387-d2a378ff31b6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.801242] env[69796]: DEBUG nova.network.neutron [req-5c4d249e-7dc2-48c6-9310-e6b229ccd218 req-8cf0d6c0-f9a1-4c87-b8c4-f05bcf478a87 service nova] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Refreshing network info cache for port 8a8a7e47-3d72-4d40-b819-2d51cd634de6 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 737.802652] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]527c5281-2fb9-6713-5db2-2b588ad79d80, 'name': SearchDatastore_Task, 'duration_secs': 0.013936} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.803093] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.803334] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] d0e1a7df-f83f-43c2-a387-d2a378ff31b6/d0e1a7df-f83f-43c2-a387-d2a378ff31b6.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 737.803764] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f98701d0-ba3c-4d37-a811-9efe396f9869 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.813551] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Waiting for the task: (returnval){ [ 737.813551] env[69796]: value = "task-4234228" [ 737.813551] env[69796]: _type = "Task" [ 737.813551] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.823866] env[69796]: DEBUG nova.compute.manager [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 737.826764] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234228, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.031567] env[69796]: DEBUG nova.network.neutron [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Successfully updated port: 6196c31c-6907-4695-91cf-2de0c3cac58f {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 738.153667] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234226, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.253323} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.154429] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 738.154969] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17dadb2-62f4-445b-855f-da26e7c746f4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.179678] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 38792225-b054-4c08-b3ec-51d46287b0f9/38792225-b054-4c08-b3ec-51d46287b0f9.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 738.179678] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-697b16a3-ef90-4946-9ce8-e263c4dfa307 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.206116] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 738.206116] env[69796]: value = "task-4234229" [ 738.206116] env[69796]: _type = "Task" [ 738.206116] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.212770] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234227, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.219694] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234229, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.299388] env[69796]: DEBUG nova.scheduler.client.report [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 738.332992] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234228, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.368638] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.485061] env[69796]: DEBUG nova.compute.manager [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Start spawning the instance on the hypervisor. 
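The inventory report above pins down the provider's schedulable capacity: for each resource class, Placement can allocate roughly (total - reserved) * allocation_ratio. A quick worked check of the logged numbers (48 VCPU at a 4.0 ratio gives 192 schedulable vCPUs) follows; the helper is an illustration, not the placement client.

```python
# Inventory values copied from the report for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def schedulable(inv):
    """Capacity the scheduler may allocate: (total - reserved) * allocation_ratio."""
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(schedulable(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```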
{{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 738.549865] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Acquiring lock "refresh_cache-47005af8-11fe-498f-9b67-e0316faeeb8f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.550052] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Acquired lock "refresh_cache-47005af8-11fe-498f-9b67-e0316faeeb8f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.550273] env[69796]: DEBUG nova.network.neutron [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 738.568530] env[69796]: DEBUG nova.virt.hardware [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 738.568898] env[69796]: DEBUG nova.virt.hardware [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 738.569664] env[69796]: DEBUG nova.virt.hardware [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 738.569950] env[69796]: DEBUG nova.virt.hardware [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 738.570152] env[69796]: DEBUG nova.virt.hardware [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 738.570357] env[69796]: DEBUG nova.virt.hardware [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a 
tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 738.570602] env[69796]: DEBUG nova.virt.hardware [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 738.570760] env[69796]: DEBUG nova.virt.hardware [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 738.570947] env[69796]: DEBUG nova.virt.hardware [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 738.571186] env[69796]: DEBUG nova.virt.hardware [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 738.571439] env[69796]: DEBUG nova.virt.hardware [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 738.572897] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2d51b2-520f-4b23-82c6-e6c22e32d08e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.586044] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddaa695d-8067-457b-8ef7-cc846e01f66f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.722281] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234227, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.650482} completed successfully. 
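The nova.virt.hardware lines above walk topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits set, the only factorization of one vCPU is sockets=1, cores=1, threads=1. The enumeration below sketches that factorization step for an arbitrary vCPU count under the same limits; it is a simplified illustration, not the actual hardware.py logic.

```python
from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """All (sockets, cores, threads) whose product equals the vCPU count."""
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    topos = []
    for sockets, cores in product(divisors, divisors):
        if vcpus % (sockets * cores):
            continue
        threads = vcpus // (sockets * cores)
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1))   # [(1, 1, 1)]  -- matches the logged result
print(possible_topologies(4))   # includes (1, 4, 1), (2, 2, 1), (4, 1, 1), ...
```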
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.724654] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 47f223c0-12b0-4eda-ab42-81fe8b95afac/47f223c0-12b0-4eda-ab42-81fe8b95afac.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 738.724896] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 738.725189] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234229, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.725439] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ef1ada7-ba2c-4bd8-8e70-24a38f16184c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.733201] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Waiting for the task: (returnval){ [ 738.733201] env[69796]: value = "task-4234230" [ 738.733201] env[69796]: _type = "Task" [ 738.733201] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.750700] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234230, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.812235] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.813814] env[69796]: DEBUG nova.compute.manager [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Start building networks asynchronously for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 738.821527] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.543s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.821527] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.821527] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69796) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 738.821527] env[69796]: DEBUG oslo_concurrency.lockutils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.097s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.825937] env[69796]: INFO nova.compute.claims [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 738.830058] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f6e8b6-cad7-48f0-b977-9ac56a4eab6d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.844988] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234228, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.885709} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.845137] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] d0e1a7df-f83f-43c2-a387-d2a378ff31b6/d0e1a7df-f83f-43c2-a387-d2a378ff31b6.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 738.845352] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 738.847312] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb4efe9-94a4-4e27-baba-07d1621a1576 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.853272] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3cb49932-7df5-42cf-8ab2-f11750ebe52a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.869536] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10cb1840-5724-4d1b-8708-9c028928e1c1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.874093] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Waiting for the task: (returnval){ [ 738.874093] env[69796]: value = "task-4234231" [ 738.874093] env[69796]: _type = "Task" [ 738.874093] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.881416] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3a935f-d865-423b-ba0a-954df5defea8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.890314] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234231, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.927494] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180759MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69796) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 738.927494] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.181338] env[69796]: DEBUG nova.network.neutron [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 739.220809] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234229, 'name': ReconfigVM_Task, 'duration_secs': 0.789} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.225380] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 38792225-b054-4c08-b3ec-51d46287b0f9/38792225-b054-4c08-b3ec-51d46287b0f9.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 739.226709] env[69796]: DEBUG nova.compute.manager [req-3d2b478c-3cb1-48ac-a75c-74b8d7e32c21 req-b3e08582-dbc7-40f9-9573-4086581731a2 service nova] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Received event network-vif-plugged-6196c31c-6907-4695-91cf-2de0c3cac58f {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 739.226998] env[69796]: DEBUG oslo_concurrency.lockutils [req-3d2b478c-3cb1-48ac-a75c-74b8d7e32c21 req-b3e08582-dbc7-40f9-9573-4086581731a2 service nova] Acquiring lock "47005af8-11fe-498f-9b67-e0316faeeb8f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.227297] env[69796]: DEBUG oslo_concurrency.lockutils [req-3d2b478c-3cb1-48ac-a75c-74b8d7e32c21 req-b3e08582-dbc7-40f9-9573-4086581731a2 service nova] Lock "47005af8-11fe-498f-9b67-e0316faeeb8f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.227518] env[69796]: DEBUG oslo_concurrency.lockutils [req-3d2b478c-3cb1-48ac-a75c-74b8d7e32c21 req-b3e08582-dbc7-40f9-9573-4086581731a2 service nova] Lock "47005af8-11fe-498f-9b67-e0316faeeb8f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.227732] env[69796]: DEBUG nova.compute.manager [req-3d2b478c-3cb1-48ac-a75c-74b8d7e32c21 req-b3e08582-dbc7-40f9-9573-4086581731a2 service nova] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] No waiting events found dispatching network-vif-plugged-6196c31c-6907-4695-91cf-2de0c3cac58f {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 739.227967] env[69796]: WARNING nova.compute.manager [req-3d2b478c-3cb1-48ac-a75c-74b8d7e32c21 req-b3e08582-dbc7-40f9-9573-4086581731a2 service nova] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Received unexpected event network-vif-plugged-6196c31c-6907-4695-91cf-2de0c3cac58f for instance with vm_state building and task_state spawning. [ 739.228710] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c21df36-5ed1-486b-b6fb-507c3f2b52d2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.240091] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 739.240091] env[69796]: value = "task-4234232" [ 739.240091] env[69796]: _type = "Task" [ 739.240091] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.249686] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234230, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085269} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.253798] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 739.254240] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c40c9f0-eba3-4f6e-b05c-7ed837ebe29a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.265642] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234232, 'name': Rename_Task} progress is 10%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.286832] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 47f223c0-12b0-4eda-ab42-81fe8b95afac/47f223c0-12b0-4eda-ab42-81fe8b95afac.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 739.287186] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f42b5a0b-1a77-402c-9d85-a290a62b24ca {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.309134] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Waiting for the task: (returnval){ [ 739.309134] env[69796]: value = "task-4234233" [ 739.309134] env[69796]: _type = "Task" [ 739.309134] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.319127] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234233, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.321627] env[69796]: DEBUG nova.compute.utils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 739.323133] env[69796]: DEBUG nova.compute.manager [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Allocating IP information in the background. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 739.323303] env[69796]: DEBUG nova.network.neutron [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 739.387012] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234231, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088266} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.387414] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 739.388873] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f857051-aff3-4ff6-b253-afdafde0d4a0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.414657] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] d0e1a7df-f83f-43c2-a387-d2a378ff31b6/d0e1a7df-f83f-43c2-a387-d2a378ff31b6.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 739.417942] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae2be766-bb02-49d4-8f41-ee5708bdfe99 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.439947] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Waiting for the task: (returnval){ [ 739.439947] env[69796]: value = "task-4234234" [ 739.439947] env[69796]: _type = "Task" [ 739.439947] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.450481] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234234, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.654889] env[69796]: DEBUG nova.policy [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e7a4ce6b0f1547a49500828b8eaa8ee7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '44db36ccd9b549969a2aa33ede90ed0c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 739.755777] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234232, 'name': Rename_Task, 'duration_secs': 0.158642} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.756095] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 739.756351] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e76dff3-e6b6-4974-a38a-20bf8262eaef {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.767224] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 739.767224] env[69796]: value = "task-4234235" [ 739.767224] env[69796]: _type = "Task" [ 739.767224] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.778390] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234235, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.820631] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234233, 'name': ReconfigVM_Task, 'duration_secs': 0.373052} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.821016] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 47f223c0-12b0-4eda-ab42-81fe8b95afac/47f223c0-12b0-4eda-ab42-81fe8b95afac.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 739.821728] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3365f7ae-7846-4af6-9d75-6ce5d9d41de9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.826615] env[69796]: DEBUG nova.compute.manager [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Start building block device mappings for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 739.831714] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Waiting for the task: (returnval){ [ 739.831714] env[69796]: value = "task-4234236" [ 739.831714] env[69796]: _type = "Task" [ 739.831714] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.847393] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234236, 'name': Rename_Task} progress is 6%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.961038] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234234, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.025160] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddeebcb5-42e7-46ed-9234-5cc16da1c3b9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.034894] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f45aef9-ff62-4b43-a3a9-1097e2b0fc83 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.073382] env[69796]: DEBUG nova.network.neutron [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Updating instance_info_cache with network_info: [{"id": "6196c31c-6907-4695-91cf-2de0c3cac58f", "address": "fa:16:3e:33:8b:34", "network": {"id": "078ddec6-376c-4295-9e59-ab62f27e997f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "21c30a7ac31746b2847e98c1cec76d99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6196c31c-69", "ovs_interfaceid": "6196c31c-6907-4695-91cf-2de0c3cac58f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.075307] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969a0f6d-af58-4577-804c-ad1f64f9ee04 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.090314] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16960de8-acbe-42f1-8255-10631247b6e4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.113803] env[69796]: DEBUG nova.compute.provider_tree [None 
req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.241516] env[69796]: DEBUG nova.network.neutron [req-5c4d249e-7dc2-48c6-9310-e6b229ccd218 req-8cf0d6c0-f9a1-4c87-b8c4-f05bcf478a87 service nova] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Updated VIF entry in instance network info cache for port 8a8a7e47-3d72-4d40-b819-2d51cd634de6. {{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 740.242130] env[69796]: DEBUG nova.network.neutron [req-5c4d249e-7dc2-48c6-9310-e6b229ccd218 req-8cf0d6c0-f9a1-4c87-b8c4-f05bcf478a87 service nova] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Updating instance_info_cache with network_info: [{"id": "8a8a7e47-3d72-4d40-b819-2d51cd634de6", "address": "fa:16:3e:ea:7b:8a", "network": {"id": "078ddec6-376c-4295-9e59-ab62f27e997f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.146", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "21c30a7ac31746b2847e98c1cec76d99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a8a7e47-3d", "ovs_interfaceid": "8a8a7e47-3d72-4d40-b819-2d51cd634de6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.252722] env[69796]: DEBUG nova.network.neutron [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Successfully created port: 65b8c00f-8a84-4930-a8b4-c7a8e994421b {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 740.277750] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234235, 'name': PowerOnVM_Task} progress is 95%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.346597] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234236, 'name': Rename_Task, 'duration_secs': 0.17813} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.346597] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 740.346863] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c4079ee-ffce-47d2-949f-928b2c19828a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.354709] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Waiting for the task: (returnval){ [ 740.354709] env[69796]: value = "task-4234237" [ 740.354709] env[69796]: _type = "Task" [ 740.354709] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.366414] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234237, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.455118] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234234, 'name': ReconfigVM_Task, 'duration_secs': 0.564573} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.455468] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Reconfigured VM instance instance-00000002 to attach disk [datastore2] d0e1a7df-f83f-43c2-a387-d2a378ff31b6/d0e1a7df-f83f-43c2-a387-d2a378ff31b6.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 740.458352] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fbf7a415-d0fb-4033-950b-522d945caa52 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.465224] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Waiting for the task: (returnval){ [ 740.465224] env[69796]: value = "task-4234238" [ 740.465224] env[69796]: _type = "Task" [ 740.465224] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.476287] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234238, 'name': Rename_Task} progress is 5%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.580116] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Releasing lock "refresh_cache-47005af8-11fe-498f-9b67-e0316faeeb8f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.580488] env[69796]: DEBUG nova.compute.manager [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Instance network_info: |[{"id": "6196c31c-6907-4695-91cf-2de0c3cac58f", "address": "fa:16:3e:33:8b:34", "network": {"id": "078ddec6-376c-4295-9e59-ab62f27e997f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "21c30a7ac31746b2847e98c1cec76d99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6196c31c-69", "ovs_interfaceid": "6196c31c-6907-4695-91cf-2de0c3cac58f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 740.580969] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:8b:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ee9f433-666e-4d74-96df-c7c7a6ac7fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6196c31c-6907-4695-91cf-2de0c3cac58f', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 740.591676] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Creating folder: Project (ec450f51915940ceafe12b73ea82353f). Parent ref: group-v837766. 
{{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 740.592328] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99f190f6-ecb2-4afa-8974-e631b25403b1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.607410] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Created folder: Project (ec450f51915940ceafe12b73ea82353f) in parent group-v837766. [ 740.607643] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Creating folder: Instances. Parent ref: group-v837776. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 740.607915] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0166ff25-26d4-4f02-a826-b76094529c56 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.621410] env[69796]: DEBUG nova.scheduler.client.report [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 740.625488] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Created folder: Instances in parent group-v837776. [ 740.626053] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 740.626782] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 740.627757] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5792b77-716d-4733-b2d7-71c24a4950fc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.652563] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 740.652563] env[69796]: value = "task-4234241" [ 740.652563] env[69796]: _type = "Task" [ 740.652563] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.667063] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234241, 'name': CreateVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.747047] env[69796]: DEBUG oslo_concurrency.lockutils [req-5c4d249e-7dc2-48c6-9310-e6b229ccd218 req-8cf0d6c0-f9a1-4c87-b8c4-f05bcf478a87 service nova] Releasing lock "refresh_cache-d0e1a7df-f83f-43c2-a387-d2a378ff31b6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.782012] env[69796]: DEBUG oslo_vmware.api [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234235, 'name': PowerOnVM_Task, 'duration_secs': 0.585819} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.782358] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 740.783656] env[69796]: INFO nova.compute.manager [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Took 9.96 seconds to spawn the instance on the hypervisor. [ 740.783944] env[69796]: DEBUG nova.compute.manager [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 740.785044] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d684f6-29a0-4ce4-bb54-afedca359784 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.841408] env[69796]: DEBUG nova.compute.manager [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Start spawning the instance on the hypervisor. {{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 740.869341] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234237, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.902144] env[69796]: DEBUG nova.virt.hardware [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 740.902144] env[69796]: DEBUG nova.virt.hardware [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 740.902144] env[69796]: DEBUG nova.virt.hardware [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 740.902525] env[69796]: DEBUG nova.virt.hardware [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 740.902582] env[69796]: DEBUG nova.virt.hardware [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 740.902716] env[69796]: DEBUG nova.virt.hardware [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 740.902942] env[69796]: DEBUG nova.virt.hardware [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 740.903113] env[69796]: DEBUG nova.virt.hardware [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 740.903302] env[69796]: DEBUG nova.virt.hardware [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 740.903502] env[69796]: DEBUG nova.virt.hardware [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 740.903731] env[69796]: DEBUG nova.virt.hardware [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 740.910215] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0b0453-8399-42ed-9294-322917467604 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.922810] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33a2b66-2c23-403a-8ab2-6ba7b316e1d2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.984012] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234238, 'name': Rename_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.128081] env[69796]: DEBUG oslo_concurrency.lockutils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.308s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.128653] env[69796]: DEBUG nova.compute.manager [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Start building networks asynchronously for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 741.134438] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.766s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.136071] env[69796]: INFO nova.compute.claims [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 741.165982] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234241, 'name': CreateVM_Task, 'duration_secs': 0.41672} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.165982] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 741.166811] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.167015] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.167379] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 741.167680] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1764ea3-4e53-4bdc-bb3c-3418ac268ab9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.173936] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Waiting for the task: (returnval){ [ 741.173936] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]528459a7-29b1-896b-0c6a-74ccf9639b8b" [ 741.173936] env[69796]: _type = "Task" [ 741.173936] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.188369] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]528459a7-29b1-896b-0c6a-74ccf9639b8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.291515] env[69796]: DEBUG nova.network.neutron [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Successfully created port: 327722fe-c5c2-466f-92ca-c218e5304f72 {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 741.315715] env[69796]: INFO nova.compute.manager [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Took 17.99 seconds to build instance. [ 741.370353] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234237, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.481696] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234238, 'name': Rename_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.526897] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] Acquiring lock "dd581b1c-35ba-44d3-80b5-49950fc49f5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.529674] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] Lock "dd581b1c-35ba-44d3-80b5-49950fc49f5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.638053] env[69796]: DEBUG nova.compute.utils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 741.642122] env[69796]: DEBUG nova.compute.manager [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Not allocating networking since 'none' was specified. 
{{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 741.692531] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]528459a7-29b1-896b-0c6a-74ccf9639b8b, 'name': SearchDatastore_Task, 'duration_secs': 0.023998} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.693684] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.693779] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 741.693999] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.694304] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 741.694393] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 741.695402] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9fc49fb-815e-4c8b-9bc5-c48634a0f4be {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.708449] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 741.710112] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 741.710112] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c62ecbcf-b522-4bb6-8f06-7a78b0cc0607 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.718717] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Waiting for the task: (returnval){ [ 741.718717] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]529cf9c9-4f19-1874-fc0b-9806449f1cb0" [ 741.718717] env[69796]: _type = "Task" [ 741.718717] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.738392] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]529cf9c9-4f19-1874-fc0b-9806449f1cb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.818274] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fe3399f1-15ad-4cc4-b37c-6384ef061caf tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "38792225-b054-4c08-b3ec-51d46287b0f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.498s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.819127] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "38792225-b054-4c08-b3ec-51d46287b0f9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.046s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.819127] env[69796]: INFO nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] During sync_power_state the instance has a pending task (spawning). Skip. [ 741.819127] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "38792225-b054-4c08-b3ec-51d46287b0f9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.867845] env[69796]: DEBUG oslo_vmware.api [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234237, 'name': PowerOnVM_Task, 'duration_secs': 1.087617} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.868790] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 741.868790] env[69796]: INFO nova.compute.manager [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Took 15.49 seconds to spawn the instance on the hypervisor. [ 741.869443] env[69796]: DEBUG nova.compute.manager [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 741.870642] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62bcb4a3-5f51-450a-b5ab-562c63707011 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.987719] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234238, 'name': Rename_Task, 'duration_secs': 1.241557} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.989737] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 741.989737] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb80cce0-965e-45a0-9b34-83e9be55c0fe {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.999119] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Waiting for the task: (returnval){ [ 741.999119] env[69796]: value = "task-4234242" [ 741.999119] env[69796]: _type = "Task" [ 741.999119] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.017568] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234242, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.031809] env[69796]: DEBUG nova.compute.manager [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 742.143145] env[69796]: DEBUG nova.compute.manager [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Start building block device mappings for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 742.235228] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]529cf9c9-4f19-1874-fc0b-9806449f1cb0, 'name': SearchDatastore_Task, 'duration_secs': 0.030375} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.236182] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24d77e2a-b027-4015-b38c-b4c1459bd160 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.247020] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Waiting for the task: (returnval){ [ 742.247020] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]529a19b7-b796-e92f-4cbb-9e31fcefe441" [ 742.247020] env[69796]: _type = "Task" [ 742.247020] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.255036] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]529a19b7-b796-e92f-4cbb-9e31fcefe441, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.401101] env[69796]: INFO nova.compute.manager [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Took 20.33 seconds to build instance. 
[ 742.428962] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3912a5dc-4bce-4efb-8b45-377459beafd9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.439597] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d841037f-e92a-4674-82ec-6fde8eb0f4bf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.479951] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f9eedf-92bc-4a18-b805-3943b58583db {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.487490] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Acquiring lock "5abf4fa3-bb75-47ab-bd8a-4b181e840946" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.487887] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Lock "5abf4fa3-bb75-47ab-bd8a-4b181e840946" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.493701] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96af694-82f6-4e07-8064-393501d9042f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.515796] env[69796]: DEBUG nova.compute.provider_tree [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.524618] env[69796]: DEBUG oslo_vmware.api [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Task: {'id': task-4234242, 'name': PowerOnVM_Task, 'duration_secs': 0.525743} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.524975] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 742.525238] env[69796]: INFO nova.compute.manager [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Took 13.92 seconds to spawn the instance on the hypervisor. 
[ 742.525500] env[69796]: DEBUG nova.compute.manager [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 742.526467] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb7ba33-70c2-4be6-86b6-523c31bbb13b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.554182] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.758255] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]529a19b7-b796-e92f-4cbb-9e31fcefe441, 'name': SearchDatastore_Task, 'duration_secs': 0.018335} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.758255] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.758255] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 47005af8-11fe-498f-9b67-e0316faeeb8f/47005af8-11fe-498f-9b67-e0316faeeb8f.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 742.758922] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae50f9b8-7d45-4878-8b8c-5ea1b2cdd2a0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.767244] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Waiting for the task: (returnval){ [ 742.767244] env[69796]: value = "task-4234243" [ 742.767244] env[69796]: _type = "Task" [ 742.767244] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.780372] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234243, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.901654] env[69796]: DEBUG oslo_concurrency.lockutils [None req-38f62c80-39e7-48ef-bb12-270671e3e6cc tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "47f223c0-12b0-4eda-ab42-81fe8b95afac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.842s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.902204] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "47f223c0-12b0-4eda-ab42-81fe8b95afac" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.130s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.902256] env[69796]: INFO nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] During sync_power_state the instance has a pending task (spawning). Skip. [ 742.902705] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "47f223c0-12b0-4eda-ab42-81fe8b95afac" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.993126] env[69796]: DEBUG nova.compute.manager [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 743.020274] env[69796]: DEBUG nova.scheduler.client.report [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 743.052471] env[69796]: INFO nova.compute.manager [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Took 19.90 seconds to build instance. [ 743.153653] env[69796]: DEBUG nova.compute.manager [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Start spawning the instance on the hypervisor. 
{{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 743.191146] env[69796]: DEBUG nova.virt.hardware [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 743.191444] env[69796]: DEBUG nova.virt.hardware [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 743.191670] env[69796]: DEBUG nova.virt.hardware [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 743.191733] env[69796]: DEBUG nova.virt.hardware [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 743.191866] env[69796]: DEBUG nova.virt.hardware [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 743.192023] env[69796]: DEBUG nova.virt.hardware [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 743.192305] env[69796]: DEBUG nova.virt.hardware [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 743.193749] env[69796]: DEBUG nova.virt.hardware [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 743.193749] env[69796]: DEBUG nova.virt.hardware [None 
req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 743.193749] env[69796]: DEBUG nova.virt.hardware [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 743.193749] env[69796]: DEBUG nova.virt.hardware [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 743.194307] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82523688-43e7-475e-a48d-6325f19ca23c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.207194] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc93afb-2b26-48dc-bb9f-293b52e295df {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.235466] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Instance VIF info [] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 743.246839] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Creating folder: Project (335c147694e94d67be5aa4f7efb75ad2). Parent ref: group-v837766. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 743.246839] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ed178b6-f953-41f6-8c36-c10b88cd8f46 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.260820] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Created folder: Project (335c147694e94d67be5aa4f7efb75ad2) in parent group-v837766. [ 743.261075] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Creating folder: Instances. Parent ref: group-v837779. 
{{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 743.261305] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96299e9a-f004-47db-ab07-d57a85de7363 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.275441] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Created folder: Instances in parent group-v837779. [ 743.275441] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 743.277617] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 743.277941] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4692d035-e6b1-4468-9796-5d9701dcc0ac {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.298555] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234243, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.307896] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 743.307896] env[69796]: value = "task-4234246" [ 743.307896] env[69796]: _type = "Task" [ 743.307896] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.318264] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234246, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.340340] env[69796]: DEBUG oslo_concurrency.lockutils [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Acquiring lock "64ab714b-61b6-48be-a2cb-5a5df86f7512" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.340340] env[69796]: DEBUG oslo_concurrency.lockutils [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Lock "64ab714b-61b6-48be-a2cb-5a5df86f7512" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.523828] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.527464] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.392s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.527464] env[69796]: DEBUG nova.compute.manager [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Start building networks asynchronously for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 743.530912] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.605s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.562340] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2d88e5cd-fee0-405b-9fee-ee9cb72f039e tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.422s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.562340] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.784s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.562340] env[69796]: INFO nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] During sync_power_state the instance has a pending task (spawning). Skip. [ 743.562340] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.781645] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234243, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.680008} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.782157] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 47005af8-11fe-498f-9b67-e0316faeeb8f/47005af8-11fe-498f-9b67-e0316faeeb8f.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 743.782298] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 743.782961] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2080b1ad-ab9e-4bc1-83dc-caedbcd41dd8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.792447] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Waiting for the task: (returnval){ [ 743.792447] env[69796]: value = "task-4234247" [ 743.792447] env[69796]: _type = "Task" [ 743.792447] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.804509] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234247, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.823630] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234246, 'name': CreateVM_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.843523] env[69796]: DEBUG nova.compute.manager [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 743.894029] env[69796]: DEBUG nova.network.neutron [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Successfully updated port: 65b8c00f-8a84-4930-a8b4-c7a8e994421b {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 744.038572] env[69796]: DEBUG nova.compute.utils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 744.045440] env[69796]: DEBUG nova.compute.manager [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Allocating IP information in the background. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 744.045700] env[69796]: DEBUG nova.network.neutron [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 744.136180] env[69796]: DEBUG nova.policy [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f4ef10b8b1604725b5ed21e018e0f4b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29c700fece324cd7a8d2d01e404c5729', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 744.152708] env[69796]: DEBUG nova.network.neutron [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Successfully updated port: 327722fe-c5c2-466f-92ca-c218e5304f72 {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 744.186163] env[69796]: DEBUG nova.compute.manager [req-07e2ca54-1992-4a00-8417-684474afc6c6 req-0e176643-5eb4-4f2d-b0d2-bd422d7ce21c service nova] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Received event network-changed-6196c31c-6907-4695-91cf-2de0c3cac58f {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 744.186163] env[69796]: DEBUG nova.compute.manager [req-07e2ca54-1992-4a00-8417-684474afc6c6 req-0e176643-5eb4-4f2d-b0d2-bd422d7ce21c service nova] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Refreshing instance network info cache due to event network-changed-6196c31c-6907-4695-91cf-2de0c3cac58f. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 744.186163] env[69796]: DEBUG oslo_concurrency.lockutils [req-07e2ca54-1992-4a00-8417-684474afc6c6 req-0e176643-5eb4-4f2d-b0d2-bd422d7ce21c service nova] Acquiring lock "refresh_cache-47005af8-11fe-498f-9b67-e0316faeeb8f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.186425] env[69796]: DEBUG oslo_concurrency.lockutils [req-07e2ca54-1992-4a00-8417-684474afc6c6 req-0e176643-5eb4-4f2d-b0d2-bd422d7ce21c service nova] Acquired lock "refresh_cache-47005af8-11fe-498f-9b67-e0316faeeb8f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.187561] env[69796]: DEBUG nova.network.neutron [req-07e2ca54-1992-4a00-8417-684474afc6c6 req-0e176643-5eb4-4f2d-b0d2-bd422d7ce21c service nova] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Refreshing network info cache for port 6196c31c-6907-4695-91cf-2de0c3cac58f {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 744.308920] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234247, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.323376] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234246, 'name': CreateVM_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.371848] env[69796]: DEBUG oslo_concurrency.lockutils [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.395922] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Acquiring lock "refresh_cache-836605ee-50cb-48b0-ba2e-33db3832f8ba" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.396197] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Acquired lock "refresh_cache-836605ee-50cb-48b0-ba2e-33db3832f8ba" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.397160] env[69796]: DEBUG nova.network.neutron [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 744.546780] env[69796]: DEBUG nova.compute.manager [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Start building block device mappings for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 744.594818] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47f223c0-12b0-4eda-ab42-81fe8b95afac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 744.595064] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d0e1a7df-f83f-43c2-a387-d2a378ff31b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 744.595261] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 38792225-b054-4c08-b3ec-51d46287b0f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 744.595507] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47005af8-11fe-498f-9b67-e0316faeeb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 744.595611] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 836605ee-50cb-48b0-ba2e-33db3832f8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 744.595761] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance a4a16667-cd00-4850-9389-0bd57c7efd74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 744.595910] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 7f37f6c9-adba-4292-9d47-c455f77e539f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 744.596068] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 8b103adc-9903-406f-8fd1-e193e00cde11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 744.656095] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquiring lock "refresh_cache-a4a16667-cd00-4850-9389-0bd57c7efd74" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.656273] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquired lock "refresh_cache-a4a16667-cd00-4850-9389-0bd57c7efd74" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.656448] env[69796]: DEBUG nova.network.neutron [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 744.779033] env[69796]: DEBUG nova.network.neutron [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Successfully created port: 71d51eb3-e59f-4936-81b5-e8153da0b686 {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.806828] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234247, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.825874] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234246, 'name': CreateVM_Task, 'duration_secs': 1.486556} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.826151] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 744.827375] env[69796]: DEBUG oslo_vmware.service [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e58a95e-65a2-4f14-a49a-489206ac081f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.835754] env[69796]: DEBUG oslo_concurrency.lockutils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 744.835754] env[69796]: DEBUG oslo_concurrency.lockutils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Acquired lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 744.835754] env[69796]: DEBUG oslo_concurrency.lockutils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 744.835754] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4acb471-149c-4c6a-9960-89859ebc4dd2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.841218] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 744.841218] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52d94063-5661-0b2d-ea3d-dd8d47c01c88" [ 744.841218] env[69796]: _type = "Task" [ 744.841218] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.854949] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52d94063-5661-0b2d-ea3d-dd8d47c01c88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.970241] env[69796]: DEBUG nova.network.neutron [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.103092] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance dd581b1c-35ba-44d3-80b5-49950fc49f5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 745.297672] env[69796]: DEBUG nova.network.neutron [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Updating instance_info_cache with network_info: [{"id": "65b8c00f-8a84-4930-a8b4-c7a8e994421b", "address": "fa:16:3e:9c:f6:53", "network": {"id": "5cb7def8-e2c6-4b9b-9ea6-674c790c87f1", "bridge": "br-int", "label": "tempest-ServersTestJSON-758945376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54a54b9449f347b99d71c1bc2029cbeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65b8c00f-8a", "ovs_interfaceid": "65b8c00f-8a84-4930-a8b4-c7a8e994421b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.308981] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234247, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.092139} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.308981] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 745.308981] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7232113e-3bea-402c-a993-58c26dcb0dc8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.335359] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 47005af8-11fe-498f-9b67-e0316faeeb8f/47005af8-11fe-498f-9b67-e0316faeeb8f.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 745.336222] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34912840-c6f4-405d-9a4e-7ff72a56894d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.368970] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Waiting for the task: (returnval){ [ 745.368970] env[69796]: value = "task-4234248" [ 745.368970] env[69796]: _type = "Task" [ 745.368970] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.369408] env[69796]: DEBUG oslo_concurrency.lockutils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Releasing lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 745.369544] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 745.370245] env[69796]: DEBUG oslo_concurrency.lockutils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.370245] env[69796]: DEBUG oslo_concurrency.lockutils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Acquired lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.370245] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 745.373280] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb165725-5332-48d4-ab70-332df59e0c79 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.387178] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 745.387178] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 745.387178] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25545ac9-251f-4adc-84a1-0d52b36d4f57 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.394988] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d321e50-fa6a-49e5-bd14-7e853622b16b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.402272] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 745.402272] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52b351e4-9d8e-13e8-c6a1-f94a200aa0e1" [ 745.402272] env[69796]: _type = "Task" [ 745.402272] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.413341] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52b351e4-9d8e-13e8-c6a1-f94a200aa0e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.453186] env[69796]: DEBUG nova.network.neutron [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 745.558575] env[69796]: DEBUG nova.compute.manager [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Start spawning the instance on the hypervisor. {{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 745.574029] env[69796]: DEBUG nova.network.neutron [req-07e2ca54-1992-4a00-8417-684474afc6c6 req-0e176643-5eb4-4f2d-b0d2-bd422d7ce21c service nova] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Updated VIF entry in instance network info cache for port 6196c31c-6907-4695-91cf-2de0c3cac58f. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 745.575291] env[69796]: DEBUG nova.network.neutron [req-07e2ca54-1992-4a00-8417-684474afc6c6 req-0e176643-5eb4-4f2d-b0d2-bd422d7ce21c service nova] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Updating instance_info_cache with network_info: [{"id": "6196c31c-6907-4695-91cf-2de0c3cac58f", "address": "fa:16:3e:33:8b:34", "network": {"id": "078ddec6-376c-4295-9e59-ab62f27e997f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "21c30a7ac31746b2847e98c1cec76d99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6196c31c-69", "ovs_interfaceid": "6196c31c-6907-4695-91cf-2de0c3cac58f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.595973] env[69796]: DEBUG nova.virt.hardware [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 745.596338] env[69796]: DEBUG nova.virt.hardware [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.596414] env[69796]: DEBUG nova.virt.hardware [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 745.596589] env[69796]: DEBUG nova.virt.hardware [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
745.596731] env[69796]: DEBUG nova.virt.hardware [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 745.596896] env[69796]: DEBUG nova.virt.hardware [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 745.597113] env[69796]: DEBUG nova.virt.hardware [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 745.597271] env[69796]: DEBUG nova.virt.hardware [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 745.598630] env[69796]: DEBUG nova.virt.hardware [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 745.598630] env[69796]: DEBUG nova.virt.hardware [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 745.598630] env[69796]: DEBUG nova.virt.hardware [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 745.599770] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737e9e42-3a96-4d5d-8fa2-d9ed57bc231c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.606288] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 5abf4fa3-bb75-47ab-bd8a-4b181e840946 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 745.612845] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4da415-e1af-44f8-a570-0ad75a2706b0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.695200] env[69796]: DEBUG oslo_concurrency.lockutils [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Acquiring lock "cc8dbe63-d117-4c8f-9ba5-3de65e642ab5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.695200] env[69796]: DEBUG oslo_concurrency.lockutils [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Lock "cc8dbe63-d117-4c8f-9ba5-3de65e642ab5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.765632] env[69796]: DEBUG nova.network.neutron [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Updating instance_info_cache with network_info: [{"id": "327722fe-c5c2-466f-92ca-c218e5304f72", "address": "fa:16:3e:c5:82:6b", "network": {"id": "078ddec6-376c-4295-9e59-ab62f27e997f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "21c30a7ac31746b2847e98c1cec76d99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap327722fe-c5", "ovs_interfaceid": "327722fe-c5c2-466f-92ca-c218e5304f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 745.801013] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Releasing lock "refresh_cache-836605ee-50cb-48b0-ba2e-33db3832f8ba" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 745.802456] env[69796]: DEBUG nova.compute.manager [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Instance network_info: |[{"id": "65b8c00f-8a84-4930-a8b4-c7a8e994421b", "address": 
"fa:16:3e:9c:f6:53", "network": {"id": "5cb7def8-e2c6-4b9b-9ea6-674c790c87f1", "bridge": "br-int", "label": "tempest-ServersTestJSON-758945376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54a54b9449f347b99d71c1bc2029cbeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65b8c00f-8a", "ovs_interfaceid": "65b8c00f-8a84-4930-a8b4-c7a8e994421b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 745.802750] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:f6:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd5970ab5-34b8-4065-bfa6-f568b8f103b7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '65b8c00f-8a84-4930-a8b4-c7a8e994421b', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 745.809764] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Creating folder: Project (54a54b9449f347b99d71c1bc2029cbeb). Parent ref: group-v837766. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 745.810105] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-615ccb49-4c7f-4cdb-b16b-c04ee010b410 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.822306] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Created folder: Project (54a54b9449f347b99d71c1bc2029cbeb) in parent group-v837766. [ 745.822503] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Creating folder: Instances. Parent ref: group-v837782. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 745.822747] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c31de15f-1277-4c90-8e93-d674c7100f50 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.833549] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Created folder: Instances in parent group-v837782. 
[ 745.833818] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 745.834036] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 745.834273] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-908048f0-8803-4cdf-b073-7c8ed259932e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.859184] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 745.859184] env[69796]: value = "task-4234251" [ 745.859184] env[69796]: _type = "Task" [ 745.859184] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.870465] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234251, 'name': CreateVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.881533] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234248, 'name': ReconfigVM_Task, 'duration_secs': 0.388227} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.881533] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 47005af8-11fe-498f-9b67-e0316faeeb8f/47005af8-11fe-498f-9b67-e0316faeeb8f.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 745.882271] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b89e91a-b4d0-4be3-97d3-5548d517894c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.893688] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Waiting for the task: (returnval){ [ 745.893688] env[69796]: value = "task-4234252" [ 745.893688] env[69796]: _type = "Task" [ 745.893688] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.905431] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234252, 'name': Rename_Task} progress is 5%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.916660] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Preparing fetch location {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 745.917234] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Creating directory with path [datastore1] vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e/11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 745.917234] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72c733ac-8236-4397-ba1d-eb3f5cc5fda9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.935257] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Created directory with path [datastore1] vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e/11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 745.935568] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Fetch image to [datastore1] vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 745.935752] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Downloading image file data 11e211db-44f8-4e34-8fec-8b87ab3fce6f to [datastore1] vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk on the data store datastore1 {{(pid=69796) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 745.936689] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3669110-1ef4-4bea-b540-c807171b2c0d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.947213] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7748b63d-7fb1-4026-a0b7-239c039403c9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.962465] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760daa21-a2ae-4327-8586-651c086b89a5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.001688] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2778260-2905-4603-9276-5e8b928368dc {{(pid=69796) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.010925] env[69796]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8a675861-d090-42d8-8cb6-f7677be654ee {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.049916] env[69796]: DEBUG nova.virt.vmwareapi.images [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Downloading image file data 11e211db-44f8-4e34-8fec-8b87ab3fce6f to the data store datastore1 {{(pid=69796) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 746.081902] env[69796]: DEBUG oslo_concurrency.lockutils [req-07e2ca54-1992-4a00-8417-684474afc6c6 req-0e176643-5eb4-4f2d-b0d2-bd422d7ce21c service nova] Releasing lock "refresh_cache-47005af8-11fe-498f-9b67-e0316faeeb8f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.120460] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 64ab714b-61b6-48be-a2cb-5a5df86f7512 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 746.122252] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 746.122443] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 746.135145] env[69796]: DEBUG oslo_vmware.rw_handles [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69796) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 746.201434] env[69796]: DEBUG nova.compute.manager [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 746.213757] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.213757] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.273434] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Releasing lock "refresh_cache-a4a16667-cd00-4850-9389-0bd57c7efd74" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.273942] env[69796]: DEBUG nova.compute.manager [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Instance network_info: |[{"id": "327722fe-c5c2-466f-92ca-c218e5304f72", "address": "fa:16:3e:c5:82:6b", "network": {"id": "078ddec6-376c-4295-9e59-ab62f27e997f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "21c30a7ac31746b2847e98c1cec76d99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap327722fe-c5", "ovs_interfaceid": "327722fe-c5c2-466f-92ca-c218e5304f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 746.280195] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:82:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ee9f433-666e-4d74-96df-c7c7a6ac7fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '327722fe-c5c2-466f-92ca-c218e5304f72', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 746.289997] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Creating folder: Project (44db36ccd9b549969a2aa33ede90ed0c). Parent ref: group-v837766. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 746.291270] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4eece694-242f-4442-8a8f-fef40b48ed01 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.305047] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Created folder: Project (44db36ccd9b549969a2aa33ede90ed0c) in parent group-v837766. [ 746.305312] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Creating folder: Instances. Parent ref: group-v837785. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 746.308839] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3297891-d4ae-4bdf-9c6f-7bc43375f248 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.321514] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Created folder: Instances in parent group-v837785. [ 746.321822] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 746.322040] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 746.322433] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ddb07981-e422-4c88-ae47-7a020edc3bad {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.352824] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 746.352824] env[69796]: value = "task-4234255" [ 746.352824] env[69796]: _type = "Task" [ 746.352824] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.364609] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234255, 'name': CreateVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.380554] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234251, 'name': CreateVM_Task, 'duration_secs': 0.483045} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.381326] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 746.382053] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.382361] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Acquired lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.382896] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 746.383808] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9aa3b896-88fb-4ee5-9b5c-f21334e2f8f6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.390777] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Waiting for the task: (returnval){ [ 746.390777] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]522db876-b36c-ac70-048a-3b206a980a5b" [ 746.390777] env[69796]: _type = "Task" [ 746.390777] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.412744] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]522db876-b36c-ac70-048a-3b206a980a5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.416990] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234252, 'name': Rename_Task, 'duration_secs': 0.201568} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.420143] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 746.421838] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-402b7bf9-9193-426f-b3a4-9c15cac10ce5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.430061] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Waiting for the task: (returnval){ [ 746.430061] env[69796]: value = "task-4234256" [ 746.430061] env[69796]: _type = "Task" [ 746.430061] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.440860] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234256, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.509306] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899722db-5cb1-4262-9729-5d0eaa0aaa79 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.522688] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed9cb1d-7743-4ef6-8512-f6f4838f67cc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.565188] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd921b7-3113-4618-904a-442bda645826 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.577204] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb934691-44e8-4412-bac3-cfff0654100f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.597621] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.702818] env[69796]: DEBUG nova.network.neutron [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Successfully updated port: 71d51eb3-e59f-4936-81b5-e8153da0b686 {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 746.723518] env[69796]: DEBUG nova.compute.manager [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 
9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 746.756645] env[69796]: DEBUG oslo_concurrency.lockutils [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.807395] env[69796]: DEBUG nova.compute.manager [req-518e5e5d-8b59-487a-b550-54aab130adc4 req-03801693-f3f9-487b-93bd-8721bfa7296f service nova] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Received event network-vif-plugged-327722fe-c5c2-466f-92ca-c218e5304f72 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 746.807395] env[69796]: DEBUG oslo_concurrency.lockutils [req-518e5e5d-8b59-487a-b550-54aab130adc4 req-03801693-f3f9-487b-93bd-8721bfa7296f service nova] Acquiring lock "a4a16667-cd00-4850-9389-0bd57c7efd74-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.807395] env[69796]: DEBUG oslo_concurrency.lockutils [req-518e5e5d-8b59-487a-b550-54aab130adc4 req-03801693-f3f9-487b-93bd-8721bfa7296f service nova] Lock "a4a16667-cd00-4850-9389-0bd57c7efd74-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.807395] env[69796]: DEBUG oslo_concurrency.lockutils [req-518e5e5d-8b59-487a-b550-54aab130adc4 req-03801693-f3f9-487b-93bd-8721bfa7296f service nova] Lock "a4a16667-cd00-4850-9389-0bd57c7efd74-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.808825] env[69796]: DEBUG nova.compute.manager [req-518e5e5d-8b59-487a-b550-54aab130adc4 req-03801693-f3f9-487b-93bd-8721bfa7296f service nova] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] No waiting events found dispatching network-vif-plugged-327722fe-c5c2-466f-92ca-c218e5304f72 {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 746.809347] env[69796]: WARNING nova.compute.manager [req-518e5e5d-8b59-487a-b550-54aab130adc4 req-03801693-f3f9-487b-93bd-8721bfa7296f service nova] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Received unexpected event network-vif-plugged-327722fe-c5c2-466f-92ca-c218e5304f72 for instance with vm_state building and task_state spawning. [ 746.865308] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234255, 'name': CreateVM_Task, 'duration_secs': 0.359797} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.866039] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 746.866322] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.909751] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Releasing lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.909751] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 746.910056] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.910374] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.910699] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 746.911136] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31718f61-2219-4499-add1-4686ac9ec5df {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.918271] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Waiting for the task: (returnval){ [ 746.918271] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52a9e79d-8ff7-d4ef-31b3-5890c47ef4ca" [ 746.918271] env[69796]: _type = "Task" [ 746.918271] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.930682] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52a9e79d-8ff7-d4ef-31b3-5890c47ef4ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.944629] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234256, 'name': PowerOnVM_Task} progress is 1%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.048477] env[69796]: DEBUG oslo_vmware.rw_handles [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Completed reading data from the image iterator. {{(pid=69796) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 747.048813] env[69796]: DEBUG oslo_vmware.rw_handles [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69796) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 747.103797] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 747.128472] env[69796]: DEBUG nova.virt.vmwareapi.images [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Downloaded image file data 11e211db-44f8-4e34-8fec-8b87ab3fce6f to vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk on the data store datastore1 {{(pid=69796) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 747.132521] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Caching image {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 747.132804] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Copying 
Virtual Disk [datastore1] vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk to [datastore1] vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 747.133422] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee97c42e-5768-48d2-969f-2f4576ccdd52 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.153137] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 747.153137] env[69796]: value = "task-4234257" [ 747.153137] env[69796]: _type = "Task" [ 747.153137] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.164565] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234257, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.209628] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Acquiring lock "refresh_cache-8b103adc-9903-406f-8fd1-e193e00cde11" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.209688] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Acquired lock "refresh_cache-8b103adc-9903-406f-8fd1-e193e00cde11" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.211309] env[69796]: DEBUG nova.network.neutron [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 747.229836] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Acquiring lock "d746d66b-32df-4a4d-97bd-82b4ad364461" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.230186] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Lock "d746d66b-32df-4a4d-97bd-82b4ad364461" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.256040] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.440374] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.440843] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 747.441254] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.449800] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234256, 'name': PowerOnVM_Task} progress is 86%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.615463] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69796) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 747.615463] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.081s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.615463] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.059s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.615463] env[69796]: INFO nova.compute.claims [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 747.622189] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 747.622626] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Getting list of instances from cluster (obj){ [ 747.622626] env[69796]: value = "domain-c8" [ 747.622626] env[69796]: _type = "ClusterComputeResource" [ 747.622626] env[69796]: } {{(pid=69796) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 747.624398] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503a840b-7ec7-41b4-8908-94c023aca0db {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.649814] env[69796]: INFO nova.compute.manager [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Rebuilding instance [ 747.652647] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Got total of 7 instances {{(pid=69796) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 747.663753] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234257, 'name': CopyVirtualDisk_Task} progress is 27%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.730942] env[69796]: DEBUG nova.compute.manager [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 747.732204] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b42965-5e71-4795-8c9f-b4e2302b8e34 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.771899] env[69796]: DEBUG nova.network.neutron [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 747.944481] env[69796]: DEBUG oslo_vmware.api [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234256, 'name': PowerOnVM_Task, 'duration_secs': 1.076768} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.944649] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 747.944999] env[69796]: INFO nova.compute.manager [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Took 14.89 seconds to spawn the instance on the hypervisor. 
[ 747.945147] env[69796]: DEBUG nova.compute.manager [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 747.948446] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92b1f78-1432-40dd-a95b-1592e14b1730 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.089838] env[69796]: DEBUG nova.network.neutron [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Updating instance_info_cache with network_info: [{"id": "71d51eb3-e59f-4936-81b5-e8153da0b686", "address": "fa:16:3e:56:f0:62", "network": {"id": "078ddec6-376c-4295-9e59-ab62f27e997f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "21c30a7ac31746b2847e98c1cec76d99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71d51eb3-e5", "ovs_interfaceid": "71d51eb3-e59f-4936-81b5-e8153da0b686", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.170679] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234257, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.297406] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Acquiring lock "3020e505-513b-4b29-996a-6e70a212f508" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.297406] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Lock "3020e505-513b-4b29-996a-6e70a212f508" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.474355] env[69796]: INFO nova.compute.manager [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Took 24.16 seconds to build instance. [ 748.596964] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Releasing lock "refresh_cache-8b103adc-9903-406f-8fd1-e193e00cde11" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.597168] env[69796]: DEBUG nova.compute.manager [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Instance network_info: |[{"id": "71d51eb3-e59f-4936-81b5-e8153da0b686", "address": "fa:16:3e:56:f0:62", "network": {"id": "078ddec6-376c-4295-9e59-ab62f27e997f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "21c30a7ac31746b2847e98c1cec76d99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71d51eb3-e5", "ovs_interfaceid": "71d51eb3-e59f-4936-81b5-e8153da0b686", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 748.597656] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:56:f0:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ee9f433-666e-4d74-96df-c7c7a6ac7fda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71d51eb3-e59f-4936-81b5-e8153da0b686', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.611551] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Creating folder: Project (29c700fece324cd7a8d2d01e404c5729). Parent ref: group-v837766. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 748.611877] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-928a3c2f-cd06-4aae-8b8f-409ad1685bcf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.625192] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Created folder: Project (29c700fece324cd7a8d2d01e404c5729) in parent group-v837766. [ 748.625483] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Creating folder: Instances. Parent ref: group-v837788. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 748.626076] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4a6e4bd-f324-43b5-a075-163dfde3a6e8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.648234] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Created folder: Instances in parent group-v837788. [ 748.648234] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 748.648234] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 748.648694] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31bed23e-61b3-4cba-98b2-ae787adf384f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.687615] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234257, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.250928} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.689205] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Copied Virtual Disk [datastore1] vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk to [datastore1] vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 748.689407] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Deleting the datastore file [datastore1] vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e/11e211db-44f8-4e34-8fec-8b87ab3fce6f/tmp-sparse.vmdk {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 748.689659] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 748.689659] env[69796]: value = "task-4234260" [ 748.689659] env[69796]: _type = "Task" [ 748.689659] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.689843] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-beefdc22-cd13-4d7c-afc5-9e90fb4dd6fe {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.705915] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234260, 'name': CreateVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.709319] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 748.709319] env[69796]: value = "task-4234261" [ 748.709319] env[69796]: _type = "Task" [ 748.709319] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.719030] env[69796]: DEBUG oslo_concurrency.lockutils [None req-134f8dc3-0efe-4518-b1c4-5577d8df082f tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquiring lock "interface-47f223c0-12b0-4eda-ab42-81fe8b95afac-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.719030] env[69796]: DEBUG oslo_concurrency.lockutils [None req-134f8dc3-0efe-4518-b1c4-5577d8df082f tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "interface-47f223c0-12b0-4eda-ab42-81fe8b95afac-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.719030] env[69796]: DEBUG nova.objects.instance [None req-134f8dc3-0efe-4518-b1c4-5577d8df082f tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lazy-loading 'flavor' on Instance uuid 47f223c0-12b0-4eda-ab42-81fe8b95afac {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 748.730374] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234261, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.757955] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 748.758638] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1b98351-dad2-44ea-aa56-eac314f9ed22 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.767395] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 748.767395] env[69796]: value = "task-4234262" [ 748.767395] env[69796]: _type = "Task" [ 748.767395] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.780519] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234262, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.837394] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquiring lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.837567] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.976884] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f7c00868-64e6-49ce-8075-5e9dc78cd478 tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Lock "47005af8-11fe-498f-9b67-e0316faeeb8f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.680s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.983440] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "47005af8-11fe-498f-9b67-e0316faeeb8f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 13.211s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.984077] env[69796]: INFO nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 748.984077] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "47005af8-11fe-498f-9b67-e0316faeeb8f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.009695] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c6cb77-f3b8-4feb-9bd9-285720ed158b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.020012] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca27c61d-96bf-4ad4-8ecf-33eac43bab0f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.059453] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78c05ff-128a-4309-af27-fae5742c16b6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.069819] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302cea89-6753-4025-bd1c-41f313b83cc1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.080567] env[69796]: DEBUG nova.compute.manager [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Received event network-vif-plugged-65b8c00f-8a84-4930-a8b4-c7a8e994421b {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 749.080809] env[69796]: DEBUG oslo_concurrency.lockutils [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] Acquiring lock "836605ee-50cb-48b0-ba2e-33db3832f8ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.081134] env[69796]: DEBUG oslo_concurrency.lockutils [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] Lock "836605ee-50cb-48b0-ba2e-33db3832f8ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.081418] env[69796]: DEBUG oslo_concurrency.lockutils [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] Lock "836605ee-50cb-48b0-ba2e-33db3832f8ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.081676] env[69796]: DEBUG nova.compute.manager [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] No waiting events found dispatching network-vif-plugged-65b8c00f-8a84-4930-a8b4-c7a8e994421b {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 749.081959] env[69796]: WARNING nova.compute.manager [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c 
req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Received unexpected event network-vif-plugged-65b8c00f-8a84-4930-a8b4-c7a8e994421b for instance with vm_state building and task_state spawning. [ 749.084624] env[69796]: DEBUG nova.compute.manager [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Received event network-changed-65b8c00f-8a84-4930-a8b4-c7a8e994421b {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 749.084865] env[69796]: DEBUG nova.compute.manager [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Refreshing instance network info cache due to event network-changed-65b8c00f-8a84-4930-a8b4-c7a8e994421b. {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 749.085149] env[69796]: DEBUG oslo_concurrency.lockutils [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] Acquiring lock "refresh_cache-836605ee-50cb-48b0-ba2e-33db3832f8ba" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.085362] env[69796]: DEBUG oslo_concurrency.lockutils [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] Acquired lock "refresh_cache-836605ee-50cb-48b0-ba2e-33db3832f8ba" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.087317] env[69796]: DEBUG nova.network.neutron [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Refreshing network info cache for port 65b8c00f-8a84-4930-a8b4-c7a8e994421b {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 749.107842] env[69796]: DEBUG nova.compute.provider_tree [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 749.211255] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234260, 'name': CreateVM_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.226449] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234261, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.054325} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.226769] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 749.226988] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Moving file from [datastore1] vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e/11e211db-44f8-4e34-8fec-8b87ab3fce6f to [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f. {{(pid=69796) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 749.227358] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-5b3a7a82-1b4c-4b48-91c0-dee6ecc19738 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.231682] env[69796]: DEBUG nova.objects.instance [None req-134f8dc3-0efe-4518-b1c4-5577d8df082f tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lazy-loading 'pci_requests' on Instance uuid 47f223c0-12b0-4eda-ab42-81fe8b95afac {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 749.240241] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 749.240241] env[69796]: value = "task-4234263" [ 749.240241] env[69796]: _type = "Task" [ 749.240241] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.251865] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234263, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.282737] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Acquiring lock "119768d0-2727-4ef8-b28b-c01cd46fc671" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.282953] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Lock "119768d0-2727-4ef8-b28b-c01cd46fc671" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.284285] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234262, 'name': PowerOffVM_Task, 'duration_secs': 0.16913} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.284922] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 749.285189] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 749.286335] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8fcce81-e2a6-4abb-baf9-c8ca1c62af4a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.298512] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 749.299174] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb477bc6-6d52-4c3a-80b4-d787a139d7cc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.330815] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 749.330815] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 
38792225-b054-4c08-b3ec-51d46287b0f9] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 749.330815] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Deleting the datastore file [datastore2] 38792225-b054-4c08-b3ec-51d46287b0f9 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 749.330815] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76ec1e97-de3b-4b09-be7a-b97eefcc770a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.339870] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 749.339870] env[69796]: value = "task-4234265" [ 749.339870] env[69796]: _type = "Task" [ 749.339870] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.350556] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234265, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.484586] env[69796]: DEBUG nova.compute.manager [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 749.529515] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Acquiring lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.531652] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.003s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.531906] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Acquiring lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.532166] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.532333] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.542016] env[69796]: INFO nova.compute.manager [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Terminating instance [ 749.646242] env[69796]: ERROR nova.scheduler.client.report [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [req-72e68577-0492-4b7a-b8ed-4d5f69efdeff] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-72e68577-0492-4b7a-b8ed-4d5f69efdeff"}]} [ 749.646242] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.032s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.649234] env[69796]: ERROR nova.compute.manager [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 749.649234] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Traceback (most recent call last): [ 749.649234] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 749.649234] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] yield [ 749.649234] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 749.649234] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] self.set_inventory_for_provider( [ 749.649234] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 749.649234] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 749.649613] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-72e68577-0492-4b7a-b8ed-4d5f69efdeff"}]} [ 749.649613] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] [ 749.649613] env[69796]: ERROR 
nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] During handling of the above exception, another exception occurred: [ 749.649613] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] [ 749.649613] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Traceback (most recent call last): [ 749.649613] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 749.649613] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] with self.rt.instance_claim(context, instance, node, allocs, [ 749.649613] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 749.649613] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] return f(*args, **kwargs) [ 749.652196] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 749.652196] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] self._update(elevated, cn) [ 749.652196] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 749.652196] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] self._update_to_placement(context, compute_node, startup) [ 749.652196] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 749.652196] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 749.652196] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 749.652196] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] return attempt.get(self._wrap_exception) [ 749.652196] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 749.652196] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 749.652196] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 749.652196] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] raise value [ 749.652196] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 749.652651] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 749.652651] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 749.652651] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] self.reportclient.update_from_provider_tree( [ 749.652651] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 749.652651] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] with catch_all(pd.uuid): [ 749.652651] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 749.652651] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] self.gen.throw(typ, value, traceback) [ 749.652651] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 749.652651] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] raise exception.ResourceProviderSyncFailed() [ 749.652651] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 749.652651] env[69796]: ERROR nova.compute.manager [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] [ 749.652954] env[69796]: DEBUG nova.compute.utils [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 749.653144] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.129s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.654791] env[69796]: INFO nova.compute.claims [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.667128] env[69796]: DEBUG nova.compute.manager [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Build of instance dd581b1c-35ba-44d3-80b5-49950fc49f5f was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 749.667128] env[69796]: DEBUG nova.compute.manager [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 749.667128] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] Acquiring lock "refresh_cache-dd581b1c-35ba-44d3-80b5-49950fc49f5f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.667128] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] Acquired lock "refresh_cache-dd581b1c-35ba-44d3-80b5-49950fc49f5f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.667582] env[69796]: DEBUG nova.network.neutron [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.706523] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234260, 'name': CreateVM_Task, 'duration_secs': 0.563761} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.707030] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 749.707743] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.707925] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Acquired lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.708271] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 749.708792] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb15be45-981e-4314-aea5-91739ef700c2 {{(pid=69796) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.718123] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Waiting for the task: (returnval){ [ 749.718123] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]525863ef-7c62-4a40-884e-5fec61d057da" [ 749.718123] env[69796]: _type = "Task" [ 749.718123] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.730642] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]525863ef-7c62-4a40-884e-5fec61d057da, 'name': SearchDatastore_Task, 'duration_secs': 0.01058} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.733437] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Releasing lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.733892] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.734248] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.735082] env[69796]: DEBUG nova.objects.base [None req-134f8dc3-0efe-4518-b1c4-5577d8df082f tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Object Instance<47f223c0-12b0-4eda-ab42-81fe8b95afac> lazy-loaded attributes: flavor,pci_requests {{(pid=69796) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 749.735082] env[69796]: DEBUG nova.network.neutron [None req-134f8dc3-0efe-4518-b1c4-5577d8df082f tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 749.749765] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234263, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.050105} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.750041] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] File moved {{(pid=69796) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 749.750145] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Cleaning up location [datastore1] vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 749.750311] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Deleting the datastore file [datastore1] vmware_temp/71c40165-cc6b-4ffd-bf5b-98ec94eddd6e {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 749.751171] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9600e647-b74f-4d55-8681-72675f39fcb7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.761875] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 749.761875] env[69796]: value = "task-4234266" [ 749.761875] env[69796]: _type = "Task" [ 749.761875] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.774090] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234266, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.853851] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234265, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299093} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.854188] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 749.854469] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 749.854681] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 750.006528] env[69796]: DEBUG oslo_concurrency.lockutils [None req-134f8dc3-0efe-4518-b1c4-5577d8df082f tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "interface-47f223c0-12b0-4eda-ab42-81fe8b95afac-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.288s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.016042] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.046022] env[69796]: DEBUG nova.compute.manager [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Start destroying the instance on the hypervisor. 
{{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 750.046022] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 750.046022] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf451fc-b36a-4633-82ef-e137fb65b579 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.054320] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 750.054648] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25544219-9b41-4bdf-aa82-bbc021ad8d77 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.062849] env[69796]: DEBUG oslo_vmware.api [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Waiting for the task: (returnval){ [ 750.062849] env[69796]: value = "task-4234267" [ 750.062849] env[69796]: _type = "Task" [ 750.062849] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.073903] env[69796]: DEBUG oslo_vmware.api [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Task: {'id': task-4234267, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.185588] env[69796]: DEBUG nova.network.neutron [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Updated VIF entry in instance network info cache for port 65b8c00f-8a84-4930-a8b4-c7a8e994421b. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 750.185588] env[69796]: DEBUG nova.network.neutron [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Updating instance_info_cache with network_info: [{"id": "65b8c00f-8a84-4930-a8b4-c7a8e994421b", "address": "fa:16:3e:9c:f6:53", "network": {"id": "5cb7def8-e2c6-4b9b-9ea6-674c790c87f1", "bridge": "br-int", "label": "tempest-ServersTestJSON-758945376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54a54b9449f347b99d71c1bc2029cbeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65b8c00f-8a", "ovs_interfaceid": "65b8c00f-8a84-4930-a8b4-c7a8e994421b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.212744] env[69796]: DEBUG nova.network.neutron [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.277442] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234266, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035826} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.279996] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 750.279996] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d32f3c0f-acef-48ea-aec6-ace500c021db {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.287696] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 750.287696] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52b600a3-c56d-257a-5671-5af17ac41877" [ 750.287696] env[69796]: _type = "Task" [ 750.287696] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.300043] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52b600a3-c56d-257a-5671-5af17ac41877, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.399070] env[69796]: DEBUG nova.network.neutron [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.576892] env[69796]: DEBUG oslo_vmware.api [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Task: {'id': task-4234267, 'name': PowerOffVM_Task, 'duration_secs': 0.346729} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.577195] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 750.577367] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 750.577629] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-504dad45-a9ab-42c3-9673-78c71cdae216 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.654868] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 750.655226] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 750.655409] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Deleting the datastore file [datastore2] d0e1a7df-f83f-43c2-a387-d2a378ff31b6 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 750.656481] env[69796]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74eca2db-5a0e-48aa-8b72-9c45f7ae7c95 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.662395] env[69796]: DEBUG oslo_vmware.api [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Waiting for the task: (returnval){ [ 750.662395] env[69796]: value = "task-4234269" [ 750.662395] env[69796]: _type = "Task" [ 750.662395] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.675132] env[69796]: DEBUG oslo_vmware.api [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Task: {'id': task-4234269, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.688409] env[69796]: DEBUG oslo_concurrency.lockutils [req-57e2a7ca-e12a-4a35-9329-dfbe121c633c req-8cb20e2a-cc67-4c4c-8750-602ff19cbfdb service nova] Releasing lock "refresh_cache-836605ee-50cb-48b0-ba2e-33db3832f8ba" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.695036] env[69796]: DEBUG nova.scheduler.client.report [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 750.713432] env[69796]: DEBUG nova.scheduler.client.report [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 750.713793] env[69796]: DEBUG nova.compute.provider_tree [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 750.731626] env[69796]: DEBUG nova.scheduler.client.report [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Refreshing aggregate associations for resource provider 
dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 750.753635] env[69796]: DEBUG nova.scheduler.client.report [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 750.802021] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52b600a3-c56d-257a-5671-5af17ac41877, 'name': SearchDatastore_Task, 'duration_secs': 0.012432} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.802677] env[69796]: DEBUG oslo_concurrency.lockutils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Releasing lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.803126] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] 7f37f6c9-adba-4292-9d47-c455f77e539f/7f37f6c9-adba-4292-9d47-c455f77e539f.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 750.803841] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Acquired lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.804202] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 750.806442] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2efd070e-f5e0-42e6-a10d-199b18f9c04e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.809865] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57bd5a95-41e7-4720-8751-80141c390996 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.818677] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 
tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 750.818677] env[69796]: value = "task-4234270" [ 750.818677] env[69796]: _type = "Task" [ 750.818677] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.833614] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234270, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.835016] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 750.835210] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 750.836062] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d58b3749-d368-4f71-9886-ff9013b4f310 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.842260] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Waiting for the task: (returnval){ [ 750.842260] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]522e448d-4403-5443-8336-63b400f51494" [ 750.842260] env[69796]: _type = "Task" [ 750.842260] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.856425] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]522e448d-4403-5443-8336-63b400f51494, 'name': SearchDatastore_Task, 'duration_secs': 0.011386} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.862927] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49909556-93a9-4f9c-a6be-d7edbd8874ab {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.872386] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Waiting for the task: (returnval){ [ 750.872386] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5227be03-7339-4d75-e37d-976691562e21" [ 750.872386] env[69796]: _type = "Task" [ 750.872386] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.886981] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5227be03-7339-4d75-e37d-976691562e21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.898399] env[69796]: DEBUG nova.virt.hardware [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 750.898600] env[69796]: DEBUG nova.virt.hardware [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 750.898803] env[69796]: DEBUG nova.virt.hardware [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 750.898933] env[69796]: DEBUG nova.virt.hardware [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 750.899109] env[69796]: DEBUG nova.virt.hardware [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 750.899294] env[69796]: DEBUG nova.virt.hardware [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 750.899470] env[69796]: DEBUG nova.virt.hardware [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
750.899639] env[69796]: DEBUG nova.virt.hardware [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 750.899813] env[69796]: DEBUG nova.virt.hardware [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 750.899947] env[69796]: DEBUG nova.virt.hardware [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 750.900143] env[69796]: DEBUG nova.virt.hardware [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 750.901039] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5ad2de-e944-4085-bd24-963eff2dab9e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.906723] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] Releasing lock "refresh_cache-dd581b1c-35ba-44d3-80b5-49950fc49f5f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.906970] env[69796]: DEBUG nova.compute.manager [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
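The nova.virt.hardware records above show CPU topology selection for the m1.nano flavor: with 1 vCPU and the default limits (sockets, cores and threads each capped at 65536), the only factorization is sockets=1, cores=1, threads=1, which is why exactly one possible topology is reported and then chosen. A minimal sketch of that enumeration, assuming a plain divisor search for illustration rather than Nova's actual implementation in nova/virt/hardware.py:

# Minimal sketch (not nova's code): enumerate candidate CPU topologies for a
# flavor with `vcpus` CPUs under the per-dimension limits reported in the log,
# keeping only combinations whose product equals the vCPU count.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)] -> "Got 1 possible topologies"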
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 750.907175] env[69796]: DEBUG nova.compute.manager [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 750.907370] env[69796]: DEBUG nova.network.neutron [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 750.915471] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974d4ca1-a44a-421a-abde-4a24883705d3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.933106] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Instance VIF info [] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 750.939546] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 750.940644] env[69796]: DEBUG nova.network.neutron [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.941906] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 750.944863] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7be0df10-a9af-44ce-8991-ea057053caac {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.965844] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.965844] env[69796]: value = "task-4234271" [ 750.965844] env[69796]: _type = "Task" [ 750.965844] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.975211] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234271, 'name': CreateVM_Task} progress is 0%. 
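The repeated "Waiting for the task: (returnval){ value = ... }" and "_poll_task ... progress is N%" records (SearchDatastore_Task, CopyVirtualDisk_Task, CreateVM_Task above) come from a poll-until-complete loop around each vCenter task. A minimal sketch of that pattern, assuming a hypothetical get_task_info() lookup; this is not oslo.vmware's implementation:

# Minimal sketch of the poll-until-complete pattern behind wait_for_task /
# _poll_task. get_task_info() is a hypothetical stand-in for reading the
# vSphere TaskInfo of a task reference; not oslo.vmware's actual code.
import time

def wait_for_task(task_ref, get_task_info, interval=0.5):
    while True:
        info = get_task_info(task_ref)  # hypothetical: returns state/progress/result
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        # queued/running: report progress and poll again, as in the log records
        print(f"Task {task_ref} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)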
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.085149] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ef3d3d-5b8d-4b12-9314-c121e60cfe37 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.093576] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc0c50e-fb94-4e00-aa75-298d0d4e60d3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.126139] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e255cc16-3e92-4b0c-995e-4057f521e69f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.134696] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7316aa12-432d-4249-bc58-33f58290ed99 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.151859] env[69796]: DEBUG nova.compute.provider_tree [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 751.176278] env[69796]: DEBUG oslo_vmware.api [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Task: {'id': task-4234269, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294513} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.176278] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 751.176278] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 751.176493] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 751.176493] env[69796]: INFO nova.compute.manager [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 751.176734] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 751.176920] env[69796]: DEBUG nova.compute.manager [-] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 751.177020] env[69796]: DEBUG nova.network.neutron [-] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 751.331603] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234270, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.385097] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5227be03-7339-4d75-e37d-976691562e21, 'name': SearchDatastore_Task, 'duration_secs': 0.010996} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.385530] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Releasing lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.385988] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] 836605ee-50cb-48b0-ba2e-33db3832f8ba/836605ee-50cb-48b0-ba2e-33db3832f8ba.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 751.386391] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.386645] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 751.386972] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a5573c5-d55e-4059-8d5a-e687181c2894 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.392080] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f8dab8b-a32a-432d-90ba-314880a31f2c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.400987] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Waiting for the task: (returnval){ [ 751.400987] env[69796]: value = "task-4234272" [ 751.400987] env[69796]: _type = "Task" [ 751.400987] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.407839] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 751.408195] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 751.413239] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18ac304a-5fa8-485b-a5dd-f1f86e5e4b78 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.415778] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.421475] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Waiting for the task: (returnval){ [ 751.421475] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]528e5113-96f8-7ba2-5c0d-e09452389bfd" [ 751.421475] env[69796]: _type = "Task" [ 751.421475] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.433762] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]528e5113-96f8-7ba2-5c0d-e09452389bfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.443669] env[69796]: DEBUG nova.network.neutron [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.478933] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234271, 'name': CreateVM_Task} progress is 25%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.677225] env[69796]: ERROR nova.scheduler.client.report [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] [req-aa637b1f-7384-4d59-aa53-5f0dda0784f0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-aa637b1f-7384-4d59-aa53-5f0dda0784f0"}]} [ 751.677225] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.025s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.677859] env[69796]: ERROR nova.compute.manager [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 751.677859] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] Traceback (most recent call last): [ 751.677859] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 751.677859] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] yield [ 751.677859] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 751.677859] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] self.set_inventory_for_provider( [ 751.677859] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 751.677859] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 751.678097] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-aa637b1f-7384-4d59-aa53-5f0dda0784f0"}]} [ 751.678097] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] [ 751.678097] env[69796]: ERROR nova.compute.manager 
[instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] During handling of the above exception, another exception occurred: [ 751.678097] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] [ 751.678097] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] Traceback (most recent call last): [ 751.678097] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 751.678097] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] with self.rt.instance_claim(context, instance, node, allocs, [ 751.678097] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 751.678097] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] return f(*args, **kwargs) [ 751.678392] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 751.678392] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] self._update(elevated, cn) [ 751.678392] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 751.678392] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] self._update_to_placement(context, compute_node, startup) [ 751.678392] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 751.678392] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 751.678392] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 751.678392] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] return attempt.get(self._wrap_exception) [ 751.678392] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 751.678392] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] six.reraise(self.value[0], self.value[1], self.value[2]) [ 751.678392] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 751.678392] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] raise value [ 751.678392] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 751.678742] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 751.678742] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 751.678742] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] self.reportclient.update_from_provider_tree( [ 751.678742] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 751.678742] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] with catch_all(pd.uuid): [ 751.678742] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 751.678742] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] self.gen.throw(typ, value, traceback) [ 751.678742] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 751.678742] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] raise exception.ResourceProviderSyncFailed() [ 751.678742] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 751.678742] env[69796]: ERROR nova.compute.manager [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] [ 751.679061] env[69796]: DEBUG nova.compute.utils [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 751.679875] env[69796]: DEBUG oslo_concurrency.lockutils [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.308s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.681388] env[69796]: INFO nova.compute.claims [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 751.684418] env[69796]: DEBUG nova.compute.manager [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] Build of instance 5abf4fa3-bb75-47ab-bd8a-4b181e840946 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
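The 400 above is a plain schema rejection: the resource tracker reported DISK_GB with max_unit=0, and placement's inventory schema requires max_unit >= 1 (the inventory refresh logged at 752.73 shows placement holding DISK_GB with max_unit=1). A minimal reproduction of the quoted check using the jsonschema library; only the max_unit constraint is copied from the error body, and the surrounding structure is assumed for illustration:

# Minimal reproduction of the schema check quoted in the 400 response above.
# The max_unit subschema is copied verbatim from the error body; the rest of
# the structure is an assumption for illustration, not placement's full schema.
import jsonschema

INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "maximum": 2147483647,
                            "minimum": 1,
                        },
                    },
                },
            },
        },
    },
}

# Mirrors the DISK_GB entry from the inventory the resource tracker tried to PUT.
payload = {"inventories": {"DISK_GB": {"max_unit": 0}}}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    print(exc.message)  # 0 is less than the minimum of 1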
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 751.684840] env[69796]: DEBUG nova.compute.manager [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 751.685140] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Acquiring lock "refresh_cache-5abf4fa3-bb75-47ab-bd8a-4b181e840946" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.685326] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Acquired lock "refresh_cache-5abf4fa3-bb75-47ab-bd8a-4b181e840946" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.685449] env[69796]: DEBUG nova.network.neutron [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 751.800826] env[69796]: DEBUG nova.compute.manager [req-5b1aa721-9ac5-4f38-ae80-8fddf12eff1c req-fe389ea2-8eb6-4af6-bf1c-70c6e97b8fc9 service nova] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Received event network-changed-327722fe-c5c2-466f-92ca-c218e5304f72 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 751.801041] env[69796]: DEBUG nova.compute.manager [req-5b1aa721-9ac5-4f38-ae80-8fddf12eff1c req-fe389ea2-8eb6-4af6-bf1c-70c6e97b8fc9 service nova] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Refreshing instance network info cache due to event network-changed-327722fe-c5c2-466f-92ca-c218e5304f72. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 751.801264] env[69796]: DEBUG oslo_concurrency.lockutils [req-5b1aa721-9ac5-4f38-ae80-8fddf12eff1c req-fe389ea2-8eb6-4af6-bf1c-70c6e97b8fc9 service nova] Acquiring lock "refresh_cache-a4a16667-cd00-4850-9389-0bd57c7efd74" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.801402] env[69796]: DEBUG oslo_concurrency.lockutils [req-5b1aa721-9ac5-4f38-ae80-8fddf12eff1c req-fe389ea2-8eb6-4af6-bf1c-70c6e97b8fc9 service nova] Acquired lock "refresh_cache-a4a16667-cd00-4850-9389-0bd57c7efd74" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.801563] env[69796]: DEBUG nova.network.neutron [req-5b1aa721-9ac5-4f38-ae80-8fddf12eff1c req-fe389ea2-8eb6-4af6-bf1c-70c6e97b8fc9 service nova] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Refreshing network info cache for port 327722fe-c5c2-466f-92ca-c218e5304f72 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.834026] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234270, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.667359} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.834026] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] 7f37f6c9-adba-4292-9d47-c455f77e539f/7f37f6c9-adba-4292-9d47-c455f77e539f.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 751.834026] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 751.834026] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-62cbf343-1bb7-4e85-b3e0-2e3e9ec87ffe {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.845460] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 751.845460] env[69796]: value = "task-4234273" [ 751.845460] env[69796]: _type = "Task" [ 751.845460] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.857748] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234273, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.914038] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234272, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.940085] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]528e5113-96f8-7ba2-5c0d-e09452389bfd, 'name': SearchDatastore_Task, 'duration_secs': 0.040206} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.940920] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e43f9df1-8a78-45ef-8dd5-96100552206a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.947189] env[69796]: INFO nova.compute.manager [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] [instance: dd581b1c-35ba-44d3-80b5-49950fc49f5f] Took 1.04 seconds to deallocate network for instance. [ 751.952409] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Waiting for the task: (returnval){ [ 751.952409] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52e3543e-f811-f0b9-c660-071f444b9f22" [ 751.952409] env[69796]: _type = "Task" [ 751.952409] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.963994] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52e3543e-f811-f0b9-c660-071f444b9f22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.980399] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234271, 'name': CreateVM_Task, 'duration_secs': 0.86408} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.981146] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 751.981701] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.981920] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquired lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.982327] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 751.982591] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f79531b-ff77-4cb7-b45e-e05b914e0d39 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.989637] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 751.989637] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]526a5221-670a-9f38-4484-bd0656e56e6a" [ 751.989637] env[69796]: _type = "Task" [ 751.989637] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.002427] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]526a5221-670a-9f38-4484-bd0656e56e6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.112688] env[69796]: DEBUG nova.network.neutron [-] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.220329] env[69796]: DEBUG nova.network.neutron [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 752.339619] env[69796]: DEBUG nova.network.neutron [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.359602] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234273, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.157215} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.359868] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 752.360806] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647c772b-30a9-4db7-8402-9db19e4113e4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.385962] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 7f37f6c9-adba-4292-9d47-c455f77e539f/7f37f6c9-adba-4292-9d47-c455f77e539f.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 752.386355] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-399ce874-01a8-4837-b1a3-8061f67633b5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.416888] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234272, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.851941} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.420975] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] 836605ee-50cb-48b0-ba2e-33db3832f8ba/836605ee-50cb-48b0-ba2e-33db3832f8ba.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 752.421369] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 752.422389] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 752.422389] env[69796]: value = "task-4234274" [ 752.422389] env[69796]: _type = "Task" [ 752.422389] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.422573] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f9c40e9-80c9-4f40-8bb1-b92b9be71ff1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.434973] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234274, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.438772] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Waiting for the task: (returnval){ [ 752.438772] env[69796]: value = "task-4234275" [ 752.438772] env[69796]: _type = "Task" [ 752.438772] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.448114] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234275, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.474182] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52e3543e-f811-f0b9-c660-071f444b9f22, 'name': SearchDatastore_Task, 'duration_secs': 0.060212} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.474182] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.474182] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] a4a16667-cd00-4850-9389-0bd57c7efd74/a4a16667-cd00-4850-9389-0bd57c7efd74.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 752.474182] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Acquired lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.474478] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.474478] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebfb1202-41db-4666-9f83-5804ff053629 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.476029] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1321d88f-d18e-4f2b-8ce9-98353e8f1a0d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.484662] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Waiting for the task: (returnval){ [ 752.484662] env[69796]: value = "task-4234276" [ 752.484662] env[69796]: _type = "Task" [ 752.484662] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.493459] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.493459] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 752.495765] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e61464c7-ba28-44a2-994d-c840bab27589 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.504861] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234276, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.512107] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]526a5221-670a-9f38-4484-bd0656e56e6a, 'name': SearchDatastore_Task, 'duration_secs': 0.052717} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.513665] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Releasing lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.513857] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 752.514098] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.514433] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Waiting for the task: (returnval){ [ 752.514433] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52accedc-ee59-8b4b-258a-09b4e8b3f9d4" [ 752.514433] env[69796]: _type = "Task" [ 752.514433] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.526633] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52accedc-ee59-8b4b-258a-09b4e8b3f9d4, 'name': SearchDatastore_Task, 'duration_secs': 0.013055} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.527634] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bf9e0c2-1062-41ad-87ab-07fc01e056f6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.535510] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Waiting for the task: (returnval){ [ 752.535510] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f6480d-cf1a-5aee-a618-d2576debd1d3" [ 752.535510] env[69796]: _type = "Task" [ 752.535510] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.550622] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f6480d-cf1a-5aee-a618-d2576debd1d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.554031] env[69796]: DEBUG oslo_concurrency.lockutils [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquiring lock "6d0b5852-7b75-4054-9eb8-5af0496d800d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.554377] env[69796]: DEBUG oslo_concurrency.lockutils [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "6d0b5852-7b75-4054-9eb8-5af0496d800d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.615809] env[69796]: INFO nova.compute.manager [-] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Took 1.44 seconds to deallocate network for instance. [ 752.664053] env[69796]: DEBUG nova.network.neutron [req-5b1aa721-9ac5-4f38-ae80-8fddf12eff1c req-fe389ea2-8eb6-4af6-bf1c-70c6e97b8fc9 service nova] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Updated VIF entry in instance network info cache for port 327722fe-c5c2-466f-92ca-c218e5304f72. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.664862] env[69796]: DEBUG nova.network.neutron [req-5b1aa721-9ac5-4f38-ae80-8fddf12eff1c req-fe389ea2-8eb6-4af6-bf1c-70c6e97b8fc9 service nova] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Updating instance_info_cache with network_info: [{"id": "327722fe-c5c2-466f-92ca-c218e5304f72", "address": "fa:16:3e:c5:82:6b", "network": {"id": "078ddec6-376c-4295-9e59-ab62f27e997f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "21c30a7ac31746b2847e98c1cec76d99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap327722fe-c5", "ovs_interfaceid": "327722fe-c5c2-466f-92ca-c218e5304f72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.713635] env[69796]: DEBUG nova.scheduler.client.report [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 752.734112] env[69796]: DEBUG nova.scheduler.client.report [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 752.734395] env[69796]: DEBUG nova.compute.provider_tree [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 752.748229] env[69796]: DEBUG nova.scheduler.client.report [None 
req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 752.774389] env[69796]: DEBUG nova.scheduler.client.report [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 752.843026] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Releasing lock "refresh_cache-5abf4fa3-bb75-47ab-bd8a-4b181e840946" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.843477] env[69796]: DEBUG nova.compute.manager [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 752.843534] env[69796]: DEBUG nova.compute.manager [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] [instance: 5abf4fa3-bb75-47ab-bd8a-4b181e840946] Skipping network deallocation for instance since networking was not requested. {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 752.936548] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234274, 'name': ReconfigVM_Task, 'duration_secs': 0.435168} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.939503] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 7f37f6c9-adba-4292-9d47-c455f77e539f/7f37f6c9-adba-4292-9d47-c455f77e539f.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.941041] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a4cc741-2407-4c51-9239-0bfd12f186a9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.957567] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234275, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082858} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.961818] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 752.962299] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 752.962299] env[69796]: value = "task-4234277" [ 752.962299] env[69796]: _type = "Task" [ 752.962299] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.963792] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fc294c-8f4f-46d3-8eb4-fc901b380262 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.998101] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 836605ee-50cb-48b0-ba2e-33db3832f8ba/836605ee-50cb-48b0-ba2e-33db3832f8ba.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 753.004952] env[69796]: INFO nova.scheduler.client.report [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] Deleted allocations for instance dd581b1c-35ba-44d3-80b5-49950fc49f5f [ 753.014202] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76c3aee4-38c7-472b-9d9e-892e914eedc6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.030494] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234277, 'name': Rename_Task} progress is 14%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.039951] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234276, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503798} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.043911] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] a4a16667-cd00-4850-9389-0bd57c7efd74/a4a16667-cd00-4850-9389-0bd57c7efd74.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 753.044239] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 753.044577] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Waiting for the task: (returnval){ [ 753.044577] env[69796]: value = "task-4234278" [ 753.044577] env[69796]: _type = "Task" [ 753.044577] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.047786] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e205a4fb-fab5-4deb-898c-11b34808d5c1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.059512] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f6480d-cf1a-5aee-a618-d2576debd1d3, 'name': SearchDatastore_Task, 'duration_secs': 0.013091} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.059512] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Releasing lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.059664] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] 8b103adc-9903-406f-8fd1-e193e00cde11/8b103adc-9903-406f-8fd1-e193e00cde11.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.059861] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquired lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.060052] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 753.060819] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d10d0997-f2c8-4339-a56f-fd8aedce9259 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.068512] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25298a7d-ddfa-4db0-8344-845446a3a105 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.070790] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234278, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.071245] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Waiting for the task: (returnval){ [ 753.071245] env[69796]: value = "task-4234279" [ 753.071245] env[69796]: _type = "Task" [ 753.071245] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.081973] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Waiting for the task: (returnval){ [ 753.081973] env[69796]: value = "task-4234280" [ 753.081973] env[69796]: _type = "Task" [ 753.081973] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.090025] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 753.090025] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 753.091737] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ff26cfd-c782-44e9-a367-0410e509cf4c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.095021] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234279, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.108165] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234280, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.109031] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 753.109031] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52fc0c22-9798-61fc-3f6f-dfbef9e17fbf" [ 753.109031] env[69796]: _type = "Task" [ 753.109031] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.123674] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52fc0c22-9798-61fc-3f6f-dfbef9e17fbf, 'name': SearchDatastore_Task, 'duration_secs': 0.010268} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.125410] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.126088] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce1ff843-75f4-49d8-aa3e-129dee481217 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.133758] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 753.133758] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52277f04-cf82-ab37-faed-80dd0d84c67b" [ 753.133758] env[69796]: _type = "Task" [ 753.133758] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.145387] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52277f04-cf82-ab37-faed-80dd0d84c67b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.158579] env[69796]: DEBUG nova.compute.manager [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Received event network-vif-plugged-71d51eb3-e59f-4936-81b5-e8153da0b686 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 753.158970] env[69796]: DEBUG oslo_concurrency.lockutils [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] Acquiring lock "8b103adc-9903-406f-8fd1-e193e00cde11-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.159304] env[69796]: DEBUG oslo_concurrency.lockutils [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] Lock "8b103adc-9903-406f-8fd1-e193e00cde11-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.160985] env[69796]: DEBUG oslo_concurrency.lockutils [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] Lock "8b103adc-9903-406f-8fd1-e193e00cde11-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.160985] env[69796]: DEBUG nova.compute.manager [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] No waiting events found dispatching 
network-vif-plugged-71d51eb3-e59f-4936-81b5-e8153da0b686 {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 753.160985] env[69796]: WARNING nova.compute.manager [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Received unexpected event network-vif-plugged-71d51eb3-e59f-4936-81b5-e8153da0b686 for instance with vm_state building and task_state spawning. [ 753.160985] env[69796]: DEBUG nova.compute.manager [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Received event network-changed-71d51eb3-e59f-4936-81b5-e8153da0b686 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 753.160985] env[69796]: DEBUG nova.compute.manager [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Refreshing instance network info cache due to event network-changed-71d51eb3-e59f-4936-81b5-e8153da0b686. {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 753.161287] env[69796]: DEBUG oslo_concurrency.lockutils [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] Acquiring lock "refresh_cache-8b103adc-9903-406f-8fd1-e193e00cde11" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.161287] env[69796]: DEBUG oslo_concurrency.lockutils [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] Acquired lock "refresh_cache-8b103adc-9903-406f-8fd1-e193e00cde11" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.161357] env[69796]: DEBUG nova.network.neutron [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Refreshing network info cache for port 71d51eb3-e59f-4936-81b5-e8153da0b686 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 753.167784] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0be358-8f19-423c-b9f5-030e4d1f8ee4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.173324] env[69796]: DEBUG oslo_concurrency.lockutils [req-5b1aa721-9ac5-4f38-ae80-8fddf12eff1c req-fe389ea2-8eb6-4af6-bf1c-70c6e97b8fc9 service nova] Releasing lock "refresh_cache-a4a16667-cd00-4850-9389-0bd57c7efd74" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.180209] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc23f46b-3f51-4498-8568-e1979bc120e7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.218661] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58018ec5-38a9-4c09-82d6-1bcd6c304261 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.228359] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6f4ea8-e0f4-4829-9e99-8dfe0f090752 
{{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.245105] env[69796]: DEBUG nova.compute.provider_tree [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 753.478186] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234277, 'name': Rename_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.509156] env[69796]: DEBUG nova.compute.manager [None req-629e5ddd-ae62-48a3-8b8c-0bdd4bbf5f3d tempest-ServerExternalEventsTest-1697576761 tempest-ServerExternalEventsTest-1697576761-project] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Received event network-changed {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 753.509290] env[69796]: DEBUG nova.compute.manager [None req-629e5ddd-ae62-48a3-8b8c-0bdd4bbf5f3d tempest-ServerExternalEventsTest-1697576761 tempest-ServerExternalEventsTest-1697576761-project] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Refreshing instance network info cache due to event network-changed. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 753.509510] env[69796]: DEBUG oslo_concurrency.lockutils [None req-629e5ddd-ae62-48a3-8b8c-0bdd4bbf5f3d tempest-ServerExternalEventsTest-1697576761 tempest-ServerExternalEventsTest-1697576761-project] Acquiring lock "refresh_cache-47005af8-11fe-498f-9b67-e0316faeeb8f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.509650] env[69796]: DEBUG oslo_concurrency.lockutils [None req-629e5ddd-ae62-48a3-8b8c-0bdd4bbf5f3d tempest-ServerExternalEventsTest-1697576761 tempest-ServerExternalEventsTest-1697576761-project] Acquired lock "refresh_cache-47005af8-11fe-498f-9b67-e0316faeeb8f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.509806] env[69796]: DEBUG nova.network.neutron [None req-629e5ddd-ae62-48a3-8b8c-0bdd4bbf5f3d tempest-ServerExternalEventsTest-1697576761 tempest-ServerExternalEventsTest-1697576761-project] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 753.534340] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0c24db74-dcf6-4fc2-8c5d-0a046706a216 tempest-VolumesAssistedSnapshotsTest-805540586 tempest-VolumesAssistedSnapshotsTest-805540586-project-member] Lock "dd581b1c-35ba-44d3-80b5-49950fc49f5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.007s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.562445] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234278, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.585988] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234279, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08031} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.589684] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 753.590320] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfa5750-a4ac-4b3e-b3a2-fc116591f3f9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.599614] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234280, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.618718] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] a4a16667-cd00-4850-9389-0bd57c7efd74/a4a16667-cd00-4850-9389-0bd57c7efd74.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 753.619034] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dc798e5-45bb-4607-9b0e-d3a5b0d8ca2a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.647586] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52277f04-cf82-ab37-faed-80dd0d84c67b, 'name': SearchDatastore_Task, 'duration_secs': 0.017714} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.650214] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Releasing lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.650214] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] 38792225-b054-4c08-b3ec-51d46287b0f9/38792225-b054-4c08-b3ec-51d46287b0f9.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.650462] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Waiting for the task: (returnval){ [ 753.650462] env[69796]: value = "task-4234281" [ 753.650462] env[69796]: _type = "Task" [ 753.650462] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.650777] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62e1bf4c-4d1b-4414-b257-55022cc29e45 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.668287] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234281, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.668447] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 753.668447] env[69796]: value = "task-4234282" [ 753.668447] env[69796]: _type = "Task" [ 753.668447] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.679996] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234282, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.773489] env[69796]: DEBUG oslo_concurrency.lockutils [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquiring lock "47f223c0-12b0-4eda-ab42-81fe8b95afac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.773801] env[69796]: DEBUG oslo_concurrency.lockutils [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "47f223c0-12b0-4eda-ab42-81fe8b95afac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.774008] env[69796]: DEBUG oslo_concurrency.lockutils [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquiring lock "47f223c0-12b0-4eda-ab42-81fe8b95afac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.774134] env[69796]: DEBUG oslo_concurrency.lockutils [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "47f223c0-12b0-4eda-ab42-81fe8b95afac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.774327] env[69796]: DEBUG oslo_concurrency.lockutils [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "47f223c0-12b0-4eda-ab42-81fe8b95afac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.777733] env[69796]: INFO nova.compute.manager [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Terminating instance [ 753.785325] env[69796]: ERROR 
nova.scheduler.client.report [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [req-3b2257dc-e732-4144-9047-7774e64d79cc] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-3b2257dc-e732-4144-9047-7774e64d79cc"}]} [ 753.785852] env[69796]: DEBUG oslo_concurrency.lockutils [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.106s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.786693] env[69796]: ERROR nova.compute.manager [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
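[editor's note] The 400 above is the root cause of the ResourceProviderSyncFailed that follows: the resource tracker's ProviderTree update at 753.245105 reports DISK_GB with max_unit 0 (the earlier refresh at 753.734112 still had max_unit 1), and placement's inventory schema requires max_unit >= 1, so the PUT to /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories is rejected, the instance claim fails, and the build of 64ab714b-61b6-48be-a2cb-5a5df86f7512 is re-scheduled (see 753.806073 below; the traceback logged next shows the propagation path). As a minimal illustrative sketch only — using the jsonschema library against a schema fragment equivalent to the one quoted in the error, not placement's actual handler code — the rejection can be reproduced from the logged payload:

    # Sketch: validate the logged inventory payload against a schema fragment
    # matching the path quoted in the 400 response:
    # schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']
    import jsonschema

    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {
                                "type": "integer",
                                "minimum": 1,
                                "maximum": 2147483647,
                            },
                        },
                    },
                },
            },
        },
    }

    # Inventory as reported by the resource tracker in this log; DISK_GB
    # max_unit has dropped to 0, which violates the schema's minimum of 1.
    payload = {
        "inventories": {
            "VCPU": {"total": 48, "max_unit": 16, "allocation_ratio": 4.0},
            "MEMORY_MB": {"total": 196590, "max_unit": 65530, "allocation_ratio": 1.0},
            "DISK_GB": {"total": 400, "max_unit": 0, "allocation_ratio": 1.0},
        }
    }

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.ValidationError as exc:
        # Prints: "0 is less than the minimum of 1" at inventories/DISK_GB/max_unit,
        # matching the detail string in the 400 body above.
        print(exc.message, "at", "/".join(str(p) for p in exc.absolute_path))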
[ 753.786693] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Traceback (most recent call last): [ 753.786693] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 753.786693] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] yield [ 753.786693] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 753.786693] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] self.set_inventory_for_provider( [ 753.786693] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 753.786693] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 753.787276] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-3b2257dc-e732-4144-9047-7774e64d79cc"}]} [ 753.787276] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] [ 753.787276] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] During handling of the above exception, another exception occurred: [ 753.787276] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] [ 753.787276] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Traceback (most recent call last): [ 753.787276] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 753.787276] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] with self.rt.instance_claim(context, instance, node, allocs, [ 753.787276] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 753.787276] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] return f(*args, **kwargs) [ 753.787733] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 753.787733] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] self._update(elevated, cn) [ 753.787733] env[69796]: ERROR nova.compute.manager [instance: 
64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 753.787733] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] self._update_to_placement(context, compute_node, startup) [ 753.787733] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 753.787733] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 753.787733] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 753.787733] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] return attempt.get(self._wrap_exception) [ 753.787733] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 753.787733] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] six.reraise(self.value[0], self.value[1], self.value[2]) [ 753.787733] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 753.787733] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] raise value [ 753.787733] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 753.788320] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 753.788320] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 753.788320] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] self.reportclient.update_from_provider_tree( [ 753.788320] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 753.788320] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] with catch_all(pd.uuid): [ 753.788320] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 753.788320] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] self.gen.throw(typ, value, traceback) [ 753.788320] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 753.788320] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] raise exception.ResourceProviderSyncFailed() [ 753.788320] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 753.788320] env[69796]: ERROR nova.compute.manager [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] [ 753.788822] env[69796]: DEBUG nova.compute.utils [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 753.790337] env[69796]: DEBUG oslo_concurrency.lockutils [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.034s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.794809] env[69796]: INFO nova.compute.claims [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 753.806073] env[69796]: DEBUG nova.compute.manager [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Build of instance 64ab714b-61b6-48be-a2cb-5a5df86f7512 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 753.807089] env[69796]: DEBUG nova.compute.manager [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 753.807391] env[69796]: DEBUG oslo_concurrency.lockutils [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Acquiring lock "refresh_cache-64ab714b-61b6-48be-a2cb-5a5df86f7512" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.807503] env[69796]: DEBUG oslo_concurrency.lockutils [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Acquired lock "refresh_cache-64ab714b-61b6-48be-a2cb-5a5df86f7512" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 753.807683] env[69796]: DEBUG nova.network.neutron [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 753.817384] env[69796]: DEBUG oslo_concurrency.lockutils [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Acquiring lock 
"fcc5bac9-b312-4d4f-8ffb-828ee110aa60" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 753.819494] env[69796]: DEBUG oslo_concurrency.lockutils [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Lock "fcc5bac9-b312-4d4f-8ffb-828ee110aa60" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.886819] env[69796]: INFO nova.scheduler.client.report [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Deleted allocations for instance 5abf4fa3-bb75-47ab-bd8a-4b181e840946 [ 753.979254] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234277, 'name': Rename_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.037556] env[69796]: DEBUG nova.compute.manager [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 754.060383] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234278, 'name': ReconfigVM_Task, 'duration_secs': 0.634666} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.061110] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 836605ee-50cb-48b0-ba2e-33db3832f8ba/836605ee-50cb-48b0-ba2e-33db3832f8ba.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 754.061243] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07d2b95a-678d-49f9-b6de-672dae99bc39 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.068332] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Waiting for the task: (returnval){ [ 754.068332] env[69796]: value = "task-4234283" [ 754.068332] env[69796]: _type = "Task" [ 754.068332] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.078142] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234283, 'name': Rename_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.091504] env[69796]: DEBUG nova.network.neutron [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Updated VIF entry in instance network info cache for port 71d51eb3-e59f-4936-81b5-e8153da0b686. {{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 754.091847] env[69796]: DEBUG nova.network.neutron [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Updating instance_info_cache with network_info: [{"id": "71d51eb3-e59f-4936-81b5-e8153da0b686", "address": "fa:16:3e:56:f0:62", "network": {"id": "078ddec6-376c-4295-9e59-ab62f27e997f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "21c30a7ac31746b2847e98c1cec76d99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71d51eb3-e5", "ovs_interfaceid": "71d51eb3-e59f-4936-81b5-e8153da0b686", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.100898] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234280, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.165596] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234281, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.180677] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234282, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.286031] env[69796]: DEBUG nova.compute.manager [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Start destroying the instance on the hypervisor. {{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 754.286358] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 754.287509] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85dff1d8-5bbe-423b-a286-2c4ec169e962 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.296592] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 754.297014] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df631c07-3c87-4f31-ac38-0bc8cdb5720a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.304291] env[69796]: DEBUG oslo_vmware.api [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Waiting for the task: (returnval){ [ 754.304291] env[69796]: value = "task-4234284" [ 754.304291] env[69796]: _type = "Task" [ 754.304291] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.314468] env[69796]: DEBUG oslo_vmware.api [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234284, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.361229] env[69796]: DEBUG nova.network.neutron [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 754.397889] env[69796]: DEBUG oslo_concurrency.lockutils [None req-74aed149-c8ff-44e6-b5ef-4f84bb72447c tempest-ServerDiagnosticsV248Test-587350757 tempest-ServerDiagnosticsV248Test-587350757-project-member] Lock "5abf4fa3-bb75-47ab-bd8a-4b181e840946" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.908s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.426263] env[69796]: DEBUG nova.network.neutron [None req-629e5ddd-ae62-48a3-8b8c-0bdd4bbf5f3d tempest-ServerExternalEventsTest-1697576761 tempest-ServerExternalEventsTest-1697576761-project] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Updating instance_info_cache with network_info: [{"id": "6196c31c-6907-4695-91cf-2de0c3cac58f", "address": "fa:16:3e:33:8b:34", "network": {"id": "078ddec6-376c-4295-9e59-ab62f27e997f", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "21c30a7ac31746b2847e98c1cec76d99", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee9f433-666e-4d74-96df-c7c7a6ac7fda", "external-id": "nsx-vlan-transportzone-499", "segmentation_id": 499, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6196c31c-69", "ovs_interfaceid": "6196c31c-6907-4695-91cf-2de0c3cac58f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.482712] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234277, 'name': Rename_Task, 'duration_secs': 1.17076} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.487295] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 754.487719] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a89c834d-e1d8-4fd2-b207-eb0fafba0ead {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.498264] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 754.498264] env[69796]: value = "task-4234285" [ 754.498264] env[69796]: _type = "Task" [ 754.498264] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.516594] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234285, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.582017] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.590597] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234283, 'name': Rename_Task, 'duration_secs': 0.259658} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.596400] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 754.597625] env[69796]: DEBUG nova.network.neutron [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.599321] env[69796]: DEBUG oslo_concurrency.lockutils [req-fb708386-8d25-498c-b6ad-a29934574e4c req-b8fc8174-37b2-4b1b-ac7a-6a88ff23ccb4 service nova] Releasing lock "refresh_cache-8b103adc-9903-406f-8fd1-e193e00cde11" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.599784] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7df44cfe-2eb9-4bf1-93ee-1bdbbe61579b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.611988] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234280, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.613809] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Waiting for the task: (returnval){ [ 754.613809] env[69796]: value = "task-4234286" [ 754.613809] env[69796]: _type = "Task" [ 754.613809] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.628337] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234286, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.666885] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234281, 'name': ReconfigVM_Task, 'duration_secs': 0.696482} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.667288] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Reconfigured VM instance instance-00000006 to attach disk [datastore1] a4a16667-cd00-4850-9389-0bd57c7efd74/a4a16667-cd00-4850-9389-0bd57c7efd74.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 754.668053] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f93c25a9-ce5a-4f3e-aa8a-dbd8bd47063a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.681314] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Waiting for the task: (returnval){ [ 754.681314] env[69796]: value = "task-4234287" [ 754.681314] env[69796]: _type = "Task" [ 754.681314] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.687784] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234282, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.701012] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234287, 'name': Rename_Task} progress is 10%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.817402] env[69796]: DEBUG oslo_vmware.api [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234284, 'name': PowerOffVM_Task, 'duration_secs': 0.360886} completed successfully. 
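Note on the task entries above (task-4234280 through task-4234284): each vSphere operation returns a task object, wait_for_task registers interest in it, and _poll_task logs the progress percentage until the task completes and reports duration_secs. A rough sketch of that poll-until-done loop, intended as an illustration rather than oslo.vmware's actual implementation (the get_task_info accessor is a stand-in):

import itertools
import time

def wait_for_task(get_task_info, task_id, interval=0.1):
    # Poll until the task reports success or error, logging progress the
    # way the "_poll_task ... progress is N%" lines above do.
    while True:
        info = get_task_info(task_id)
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        print(f"Task {task_id} progress is {info['progress']}%")
        time.sleep(interval)

# Simulated accessor standing in for the vCenter task lookup.
_progress = itertools.chain([0, 51], itertools.repeat(100))
def fake_task_info(task_id):
    p = next(_progress)
    return {"state": "success" if p == 100 else "running", "progress": p}

wait_for_task(fake_task_info, "task-4234280")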
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.817854] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 754.818776] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 754.818776] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc03a1c2-25c1-46e1-80b6-2366abe18757 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.849962] env[69796]: DEBUG nova.scheduler.client.report [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 754.875766] env[69796]: DEBUG nova.scheduler.client.report [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 754.875766] env[69796]: DEBUG nova.compute.provider_tree [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 754.904092] env[69796]: DEBUG nova.scheduler.client.report [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 754.904823] env[69796]: DEBUG nova.compute.manager [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 
tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 754.910129] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 754.912034] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 754.912034] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Deleting the datastore file [datastore2] 47f223c0-12b0-4eda-ab42-81fe8b95afac {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 754.912034] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa6d8e9c-a5ad-4191-bd3f-d3c9a4492e37 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.919871] env[69796]: DEBUG oslo_vmware.api [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Waiting for the task: (returnval){ [ 754.919871] env[69796]: value = "task-4234289" [ 754.919871] env[69796]: _type = "Task" [ 754.919871] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.926840] env[69796]: DEBUG nova.scheduler.client.report [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 754.930550] env[69796]: DEBUG oslo_concurrency.lockutils [None req-629e5ddd-ae62-48a3-8b8c-0bdd4bbf5f3d tempest-ServerExternalEventsTest-1697576761 tempest-ServerExternalEventsTest-1697576761-project] Releasing lock "refresh_cache-47005af8-11fe-498f-9b67-e0316faeeb8f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.935768] env[69796]: DEBUG oslo_vmware.api [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234289, 'name': DeleteDatastoreFile_Task} progress is 0%. 
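Note on the inventory dict that req-cd2063f9 pushes into the ProviderTree above: placement derives the usable capacity of each resource class from total, reserved and allocation_ratio. A small arithmetic check with the numbers logged for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, using the commonly documented placement formula (illustrative, not taken from this deployment's code):

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}
for rc, inv in inventory.items():
    # Usable capacity: (total - reserved) * allocation_ratio.
    print(rc, int((inv["total"] - inv["reserved"]) * inv["allocation_ratio"]))
# VCPU 192, MEMORY_MB 196078, DISK_GB 400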
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.013682] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234285, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.117504] env[69796]: DEBUG oslo_concurrency.lockutils [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Releasing lock "refresh_cache-64ab714b-61b6-48be-a2cb-5a5df86f7512" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.117504] env[69796]: DEBUG nova.compute.manager [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 755.117973] env[69796]: DEBUG nova.compute.manager [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 755.118404] env[69796]: DEBUG nova.network.neutron [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 755.122319] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234280, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.748591} completed successfully. 
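Note on the recurring "Acquiring lock ... by ...", "acquired ... :: waited Ns" and ""released" ... :: held Ns" lines throughout this section: they are emitted by oslo.concurrency's lockutils wrapper around the named lock. One common way such lines get produced is the synchronized decorator; a minimal sketch with an invented guarded function:

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Placeholder body: the decorator serializes callers on the
    # "compute_resources" lock and logs the acquire/release debug lines.
    return instance_uuid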
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.132814] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] 8b103adc-9903-406f-8fd1-e193e00cde11/8b103adc-9903-406f-8fd1-e193e00cde11.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 755.132814] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 755.136079] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3879272e-8c3a-48f2-971c-49250c39f64e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.146842] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234286, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.147888] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Waiting for the task: (returnval){ [ 755.147888] env[69796]: value = "task-4234290" [ 755.147888] env[69796]: _type = "Task" [ 755.147888] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.164686] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234290, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.189719] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234282, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.16998} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.191645] env[69796]: DEBUG nova.network.neutron [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 755.198157] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] 38792225-b054-4c08-b3ec-51d46287b0f9/38792225-b054-4c08-b3ec-51d46287b0f9.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 755.198505] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 755.199796] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7a4f406d-3e0d-4e26-ae9c-14174103b076 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.216981] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 755.216981] env[69796]: value = "task-4234291" [ 755.216981] env[69796]: _type = "Task" [ 755.216981] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.217984] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234287, 'name': Rename_Task, 'duration_secs': 0.195639} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.217984] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 755.222906] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9775142e-9d5b-4845-87d2-ab755cd82a3d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.235669] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234291, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.238032] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Waiting for the task: (returnval){ [ 755.238032] env[69796]: value = "task-4234292" [ 755.238032] env[69796]: _type = "Task" [ 755.238032] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.250036] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234292, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.299026] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846b0aa4-66c3-4ee0-8572-80c2388e709b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.309219] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee96255e-1f76-4b59-80cc-7827666507eb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.344385] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cce0d8f-1bb1-465b-97a0-8c701bab701b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.353347] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521f57b9-133d-4d0a-9f5e-e853f1eb0261 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.371055] env[69796]: DEBUG nova.compute.provider_tree [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 755.419807] env[69796]: DEBUG nova.compute.manager [req-aa4618c2-b66f-4de9-b1c8-538f502b517f req-44b45835-f978-4e0f-8c10-9c1a76ba01db service nova] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Received event network-vif-deleted-8a8a7e47-3d72-4d40-b819-2d51cd634de6 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 755.432112] env[69796]: DEBUG oslo_vmware.api [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Task: {'id': task-4234289, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205475} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.432525] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 755.432653] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 755.433464] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 755.433464] env[69796]: INFO nova.compute.manager [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Took 1.15 seconds to destroy the instance on the hypervisor. [ 755.433575] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 755.434845] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.435066] env[69796]: DEBUG nova.compute.manager [-] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 755.435125] env[69796]: DEBUG nova.network.neutron [-] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 755.510852] env[69796]: DEBUG oslo_vmware.api [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234285, 'name': PowerOnVM_Task, 'duration_secs': 0.738275} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.510852] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 755.510852] env[69796]: INFO nova.compute.manager [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Took 12.36 seconds to spawn the instance on the hypervisor. [ 755.511136] env[69796]: DEBUG nova.compute.manager [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 755.513019] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3914ef-3722-43b4-a29d-148ff35aee02 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.637729] env[69796]: DEBUG oslo_vmware.api [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234286, 'name': PowerOnVM_Task, 'duration_secs': 0.574002} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.638931] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 755.638931] env[69796]: INFO nova.compute.manager [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Took 17.15 seconds to spawn the instance on the hypervisor. [ 755.638931] env[69796]: DEBUG nova.compute.manager [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 755.639390] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f25bb98-3450-4972-85db-36fb9b939400 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.667039] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234290, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089326} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.667039] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.667039] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f38ee7-4e25-4a31-9fad-ac41532b318b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.693828] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 8b103adc-9903-406f-8fd1-e193e00cde11/8b103adc-9903-406f-8fd1-e193e00cde11.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.694770] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db0838f0-a7ec-40f0-b91c-a22521de05e9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.712739] env[69796]: DEBUG nova.network.neutron [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.725357] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Waiting for the task: (returnval){ [ 755.725357] env[69796]: value = "task-4234293" [ 755.725357] env[69796]: _type = "Task" [ 755.725357] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.736695] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234291, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080806} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.736695] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.737566] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33da8491-ac02-4c4e-9ed1-7153400d5897 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.745632] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234293, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.754683] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234292, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.772089] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 38792225-b054-4c08-b3ec-51d46287b0f9/38792225-b054-4c08-b3ec-51d46287b0f9.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.772089] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecf798d2-4ad6-4219-80fc-125725f607fd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.792848] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 755.792848] env[69796]: value = "task-4234294" [ 755.792848] env[69796]: _type = "Task" [ 755.792848] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.802505] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234294, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.892840] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Acquiring lock "47005af8-11fe-498f-9b67-e0316faeeb8f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.893208] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Lock "47005af8-11fe-498f-9b67-e0316faeeb8f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.893208] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Acquiring lock "47005af8-11fe-498f-9b67-e0316faeeb8f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.893868] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Lock "47005af8-11fe-498f-9b67-e0316faeeb8f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.893868] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Lock "47005af8-11fe-498f-9b67-e0316faeeb8f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.896231] env[69796]: INFO nova.compute.manager [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Terminating instance [ 755.903382] env[69796]: ERROR nova.scheduler.client.report [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [req-602b3b1c-fe14-4c52-8a93-a79c265364f5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-602b3b1c-fe14-4c52-8a93-a79c265364f5"}]} [ 755.903382] env[69796]: DEBUG oslo_concurrency.lockutils [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.112s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.903519] env[69796]: ERROR nova.compute.manager [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 755.903519] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Traceback (most recent call last): [ 755.903519] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 755.903519] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] yield [ 755.903519] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 755.903519] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] self.set_inventory_for_provider( [ 755.903519] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 755.903519] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 755.903777] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-602b3b1c-fe14-4c52-8a93-a79c265364f5"}]} [ 755.903777] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] [ 755.903777] env[69796]: ERROR nova.compute.manager [instance: 
cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] During handling of the above exception, another exception occurred: [ 755.903777] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] [ 755.903777] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Traceback (most recent call last): [ 755.903777] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 755.903777] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] with self.rt.instance_claim(context, instance, node, allocs, [ 755.903777] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 755.903777] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] return f(*args, **kwargs) [ 755.904123] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 755.904123] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] self._update(elevated, cn) [ 755.904123] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 755.904123] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] self._update_to_placement(context, compute_node, startup) [ 755.904123] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 755.904123] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 755.904123] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 755.904123] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] return attempt.get(self._wrap_exception) [ 755.904123] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 755.904123] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] six.reraise(self.value[0], self.value[1], self.value[2]) [ 755.904123] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 755.904123] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] raise value [ 755.904123] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 755.904552] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 755.904552] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 755.904552] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] self.reportclient.update_from_provider_tree( [ 755.904552] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 755.904552] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] with catch_all(pd.uuid): [ 755.904552] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 755.904552] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] self.gen.throw(typ, value, traceback) [ 755.904552] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 755.904552] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] raise exception.ResourceProviderSyncFailed() [ 755.904552] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 755.904552] env[69796]: ERROR nova.compute.manager [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] [ 755.904879] env[69796]: DEBUG nova.compute.utils [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 755.904879] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.649s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.906843] env[69796]: INFO nova.compute.claims [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 755.912019] env[69796]: DEBUG nova.compute.manager [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Build of instance cc8dbe63-d117-4c8f-9ba5-3de65e642ab5 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
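Note on the 400 that placement returned above: it is a plain JSON-Schema validation failure. The compute node reported DISK_GB max_unit = 0 (visible in the ProviderTree update earlier in this section), while the inventory schema quoted verbatim in the error body requires max_unit >= 1. A hedged reproduction of just that check with the jsonschema library:

import jsonschema

# Schema fragment quoted in the placement error body above.
max_unit_schema = {"type": "integer", "maximum": 2147483647, "minimum": 1}

try:
    jsonschema.validate(0, max_unit_schema)  # the DISK_GB max_unit that was reported
except jsonschema.ValidationError as exc:
    print(exc.message)  # 0 is less than the minimum of 1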
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 755.912019] env[69796]: DEBUG nova.compute.manager [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 755.912019] env[69796]: DEBUG oslo_concurrency.lockutils [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Acquiring lock "refresh_cache-cc8dbe63-d117-4c8f-9ba5-3de65e642ab5" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.912019] env[69796]: DEBUG oslo_concurrency.lockutils [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Acquired lock "refresh_cache-cc8dbe63-d117-4c8f-9ba5-3de65e642ab5" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 755.912934] env[69796]: DEBUG nova.network.neutron [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 756.046269] env[69796]: INFO nova.compute.manager [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Took 18.36 seconds to build instance. [ 756.176168] env[69796]: INFO nova.compute.manager [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Took 22.04 seconds to build instance. [ 756.214644] env[69796]: INFO nova.compute.manager [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] [instance: 64ab714b-61b6-48be-a2cb-5a5df86f7512] Took 1.10 seconds to deallocate network for instance. [ 756.239961] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234293, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.259693] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234292, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.307836] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234294, 'name': ReconfigVM_Task} progress is 14%. 
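Note on the retrying frames in the traceback above: _update_to_placement is wrapped by the retrying library, so the inventory PUT is retried, and once every attempt has re-raised, the failure surfaces as ResourceProviderSyncFailed and the build of cc8dbe63-d117-4c8f-9ba5-3de65e642ab5 is re-scheduled. A rough sketch of that wrapping pattern; the retry parameters and the exception class here are invented for illustration and are not Nova's actual policy:

from retrying import retry

class ProviderUpdateFailed(Exception):
    # Stand-in for the provider-update exception seen in the traceback.
    pass

@retry(stop_max_attempt_number=4, wait_fixed=1000,
       retry_on_exception=lambda exc: isinstance(exc, ProviderUpdateFailed))
def update_to_placement():
    # Placeholder body: in the log this is the inventory PUT, which keeps
    # failing schema validation, so every retry re-raises.
    raise ProviderUpdateFailed("DISK_GB max_unit 0 rejected by placement")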
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.402026] env[69796]: DEBUG nova.compute.manager [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Start destroying the instance on the hypervisor. {{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 756.403111] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 756.404190] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275898f1-2783-470a-9e33-14442912012d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.421658] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 756.421658] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0b6d7eb-38ff-4970-a679-b2fceceadd02 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.430671] env[69796]: DEBUG oslo_vmware.api [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Waiting for the task: (returnval){ [ 756.430671] env[69796]: value = "task-4234295" [ 756.430671] env[69796]: _type = "Task" [ 756.430671] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.446067] env[69796]: DEBUG oslo_vmware.api [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234295, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.459730] env[69796]: DEBUG nova.network.neutron [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.526666] env[69796]: DEBUG nova.network.neutron [-] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.549366] env[69796]: DEBUG oslo_concurrency.lockutils [None req-612bc542-7ae0-490c-87f4-a075ffac496f tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lock "7f37f6c9-adba-4292-9d47-c455f77e539f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.874s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.678404] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4bef1b05-a9b6-4419-9629-4c2bba7b2c1a tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Lock "836605ee-50cb-48b0-ba2e-33db3832f8ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.554s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.680788] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "836605ee-50cb-48b0-ba2e-33db3832f8ba" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 20.908s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.681478] env[69796]: DEBUG nova.network.neutron [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.683604] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d605b301-9ccc-4ad1-86f2-8f51cd6c3bdd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.742650] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234293, 'name': ReconfigVM_Task, 'duration_secs': 0.779349} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.742996] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 8b103adc-9903-406f-8fd1-e193e00cde11/8b103adc-9903-406f-8fd1-e193e00cde11.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 756.744028] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20cb26e5-c918-4bf5-b0d2-790da8b087d3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.759308] env[69796]: DEBUG oslo_vmware.api [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234292, 'name': PowerOnVM_Task, 'duration_secs': 1.213752} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.762167] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 756.762167] env[69796]: INFO nova.compute.manager [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Took 15.92 seconds to spawn the instance on the hypervisor. [ 756.762428] env[69796]: DEBUG nova.compute.manager [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 756.762825] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Waiting for the task: (returnval){ [ 756.762825] env[69796]: value = "task-4234296" [ 756.762825] env[69796]: _type = "Task" [ 756.762825] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.764112] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af509c3f-6ec1-4179-b1d6-1453da91be22 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.782936] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234296, 'name': Rename_Task} progress is 6%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.806480] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234294, 'name': ReconfigVM_Task, 'duration_secs': 0.744682} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.806480] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 38792225-b054-4c08-b3ec-51d46287b0f9/38792225-b054-4c08-b3ec-51d46287b0f9.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 756.807012] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b8cfc3b-115d-4250-9462-cc6c0f4cd000 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.815598] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 756.815598] env[69796]: value = "task-4234297" [ 756.815598] env[69796]: _type = "Task" [ 756.815598] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.829802] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234297, 'name': Rename_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.948452] env[69796]: DEBUG oslo_vmware.api [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234295, 'name': PowerOffVM_Task, 'duration_secs': 0.208567} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.948781] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 756.948933] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 756.950710] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3944f82-5e47-4ad0-8585-e93c6a8f82b2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.952841] env[69796]: DEBUG nova.scheduler.client.report [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 756.973543] env[69796]: DEBUG nova.scheduler.client.report [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 756.973543] env[69796]: DEBUG nova.compute.provider_tree [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 756.988788] env[69796]: DEBUG nova.scheduler.client.report [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 757.013470] env[69796]: DEBUG nova.scheduler.client.report [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 
tempest-VolumesAdminNegativeTest-161696083-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 757.033343] env[69796]: INFO nova.compute.manager [-] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Took 1.60 seconds to deallocate network for instance. [ 757.055373] env[69796]: DEBUG nova.compute.manager [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 757.064028] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 757.064028] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 757.064028] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Deleting the datastore file [datastore2] 47005af8-11fe-498f-9b67-e0316faeeb8f {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 757.064028] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8185725c-54bf-4058-ab0f-f1f2eb69c262 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.072000] env[69796]: DEBUG oslo_vmware.api [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Waiting for the task: (returnval){ [ 757.072000] env[69796]: value = "task-4234299" [ 757.072000] env[69796]: _type = "Task" [ 757.072000] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.087749] env[69796]: DEBUG oslo_vmware.api [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234299, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.184671] env[69796]: DEBUG nova.compute.manager [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 757.191241] env[69796]: DEBUG oslo_concurrency.lockutils [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Releasing lock "refresh_cache-cc8dbe63-d117-4c8f-9ba5-3de65e642ab5" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.191375] env[69796]: DEBUG nova.compute.manager [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 757.191595] env[69796]: DEBUG nova.compute.manager [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 757.191769] env[69796]: DEBUG nova.network.neutron [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 757.203681] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "836605ee-50cb-48b0-ba2e-33db3832f8ba" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.523s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.237023] env[69796]: DEBUG nova.network.neutron [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 757.267412] env[69796]: INFO nova.scheduler.client.report [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Deleted allocations for instance 64ab714b-61b6-48be-a2cb-5a5df86f7512 [ 757.310954] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234296, 'name': Rename_Task, 'duration_secs': 0.224505} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.311360] env[69796]: INFO nova.compute.manager [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Took 21.84 seconds to build instance. 
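The entries above repeatedly poll long-running vCenter tasks (ReconfigVM_Task, PowerOnVM_Task, Rename_Task) via wait_for_task/_poll_task until they report "completed successfully". The following is an illustrative, self-contained sketch of such a poll-until-done loop; it is not the oslo.vmware implementation, and FakeTask and POLL_INTERVAL are hypothetical names introduced only for this example.

# Illustrative sketch only: a simplified poll-until-complete loop in the
# spirit of the wait_for_task/_poll_task entries above. FakeTask and
# POLL_INTERVAL are hypothetical, not oslo.vmware APIs.
import time

POLL_INTERVAL = 0.5  # seconds between polls (assumed value)


class FakeTask:
    """Stand-in for a vCenter task handle ({'id': ..., 'name': ...})."""

    def __init__(self, task_id, name, steps=3):
        self.info = {'id': task_id, 'name': name}
        self._progress = 0
        self._steps = steps

    def poll(self):
        """Advance and report progress, mimicking the 0% -> 89% -> done logs."""
        self._progress = min(100, self._progress + 100 // self._steps)
        state = 'success' if self._progress >= 100 else 'running'
        return state, self._progress


def wait_for_task(task):
    """Block until the task reports success, logging progress like _poll_task."""
    while True:
        state, progress = task.poll()
        print("Task: %s progress is %d%%." % (task.info, progress))
        if state == 'success':
            return task.info
        time.sleep(POLL_INTERVAL)


if __name__ == '__main__':
    wait_for_task(FakeTask('task-4234295', 'PowerOffVM_Task'))
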
[ 757.315401] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 757.316309] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64dc84f9-41de-40dc-9a12-23febda8bec4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.328130] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234297, 'name': Rename_Task, 'duration_secs': 0.185327} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.330559] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 757.331259] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Waiting for the task: (returnval){ [ 757.331259] env[69796]: value = "task-4234300" [ 757.331259] env[69796]: _type = "Task" [ 757.331259] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.334116] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16b3c491-de1d-4477-a2ef-c5e174c8db88 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.346841] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234300, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.348232] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 757.348232] env[69796]: value = "task-4234301" [ 757.348232] env[69796]: _type = "Task" [ 757.348232] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.362621] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234301, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.423144] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ef4baf-ef17-4f93-b23a-1c938a1dfb4e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.435081] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b5daa4-467b-4657-bb76-dc4a9665d34d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.473045] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2e29cc-70de-45b0-8142-a2442b020c64 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.483420] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4627c988-b8c4-45c3-abe8-4e1b82b810fb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.501377] env[69796]: DEBUG nova.compute.provider_tree [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.540492] env[69796]: DEBUG oslo_concurrency.lockutils [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.590664] env[69796]: DEBUG oslo_vmware.api [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Task: {'id': task-4234299, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.414446} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.590933] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 757.592366] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 757.592366] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 757.592366] env[69796]: INFO nova.compute.manager [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Took 1.19 seconds to destroy the instance on the hypervisor. [ 757.592627] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 757.592627] env[69796]: DEBUG nova.compute.manager [-] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 757.592876] env[69796]: DEBUG nova.network.neutron [-] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 757.596368] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.725027] env[69796]: DEBUG oslo_concurrency.lockutils [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.738766] env[69796]: DEBUG nova.network.neutron [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.802237] env[69796]: DEBUG oslo_concurrency.lockutils [None req-80df2024-cd71-47ee-974b-b7379f9080a3 tempest-InstanceActionsTestJSON-1066518015 tempest-InstanceActionsTestJSON-1066518015-project-member] Lock "64ab714b-61b6-48be-a2cb-5a5df86f7512" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.460s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.817648] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e664f57d-0a2e-4fe6-9820-35a812e71814 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "a4a16667-cd00-4850-9389-0bd57c7efd74" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.358s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.858507] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234300, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.872821] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234301, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.009217] env[69796]: DEBUG nova.scheduler.client.report [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 758.028312] env[69796]: DEBUG nova.compute.manager [req-09b9e965-b286-4315-8989-d6ab7f377b62 req-daf013b2-6d29-4880-8790-ffa5bb7a4e8d service nova] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Received event network-vif-deleted-dcea5761-7cad-4443-a674-5ca2c4994581 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 758.242644] env[69796]: INFO nova.compute.manager [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] [instance: cc8dbe63-d117-4c8f-9ba5-3de65e642ab5] Took 1.05 seconds to deallocate network for instance. [ 758.313023] env[69796]: DEBUG nova.compute.manager [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 758.356746] env[69796]: DEBUG oslo_vmware.api [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234300, 'name': PowerOnVM_Task, 'duration_secs': 0.668459} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.359684] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 758.359684] env[69796]: INFO nova.compute.manager [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Took 12.80 seconds to spawn the instance on the hypervisor. 
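The "Inventory has not changed" entries above carry the provider's inventory payload (VCPU, MEMORY_MB, DISK_GB with total, reserved and allocation_ratio). A minimal sketch of what that payload implies for schedulable capacity, assuming the usual placement formula capacity = (total - reserved) * allocation_ratio; the values are copied from the log entry for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3.

# Minimal sketch: derive usable capacity from the inventory payload above,
# assuming capacity = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'min_unit': 1, 'max_unit': 16,    'step_size': 1},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 1,     'step_size': 1},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    # e.g. VCPU: (48 - 0) * 4.0 = 192 schedulable units
    print("%s: capacity=%.0f (max %s per allocation)" % (rc, capacity, inv['max_unit']))
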
[ 758.359684] env[69796]: DEBUG nova.compute.manager [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 758.359684] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b3003e-bcea-481b-970c-520a6179d870 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.384648] env[69796]: DEBUG oslo_vmware.api [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234301, 'name': PowerOnVM_Task, 'duration_secs': 0.583406} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.385017] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 758.385316] env[69796]: DEBUG nova.compute.manager [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 758.386426] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47bbf19-387b-4dab-b0d1-6bb2a9939966 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.517105] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.612s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.517691] env[69796]: DEBUG nova.compute.manager [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Start building networks asynchronously for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 758.523241] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.507s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.524850] env[69796]: INFO nova.compute.claims [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 758.644826] env[69796]: DEBUG nova.network.neutron [-] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.845045] env[69796]: DEBUG oslo_concurrency.lockutils [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.891081] env[69796]: INFO nova.compute.manager [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Took 20.56 seconds to build instance. [ 758.916312] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.032084] env[69796]: DEBUG nova.compute.utils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 759.034755] env[69796]: DEBUG nova.compute.manager [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Allocating IP information in the background. 
{{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 759.035122] env[69796]: DEBUG nova.network.neutron [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 759.109396] env[69796]: DEBUG nova.policy [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2a43254072e94663b1618723e4a80e18', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '56e348a3093e4519b918105e54ffc10b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 759.147687] env[69796]: INFO nova.compute.manager [-] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Took 1.55 seconds to deallocate network for instance. [ 759.291840] env[69796]: INFO nova.scheduler.client.report [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Deleted allocations for instance cc8dbe63-d117-4c8f-9ba5-3de65e642ab5 [ 759.393217] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ad7a4035-c72c-469c-ac77-b178641e8fc3 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Lock "8b103adc-9903-406f-8fd1-e193e00cde11" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.074s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.548158] env[69796]: DEBUG nova.compute.manager [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Start building block device mappings for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 759.658611] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.749592] env[69796]: DEBUG nova.network.neutron [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Successfully created port: 3c9189a9-01dd-42e1-b2b3-9d0f3f53448e {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 759.804540] env[69796]: DEBUG oslo_concurrency.lockutils [None req-cd2063f9-fb96-434f-b4c5-5e01a14828d0 tempest-ServerGroupTestJSON-1296098590 tempest-ServerGroupTestJSON-1296098590-project-member] Lock "cc8dbe63-d117-4c8f-9ba5-3de65e642ab5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.109s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.853630] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d20b7d7-8451-4596-b70b-2c9e9b9d725e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.863400] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e82e31-34d7-474c-bc63-514382346d95 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.902040] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a77ff0-7bb1-4981-b13f-c2942275b5b3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.913150] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ad9e87-aa92-43de-ae04-d1ebcfa67d75 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.934534] env[69796]: DEBUG nova.compute.provider_tree [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.233024] env[69796]: DEBUG nova.compute.manager [None req-d66e1594-a81e-4ea2-8e46-a926c246cf0b tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 760.233024] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05dffef0-015d-4aa7-89d4-017c25652031 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.439050] env[69796]: DEBUG nova.scheduler.client.report [None 
req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 760.572938] env[69796]: DEBUG nova.compute.manager [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Start spawning the instance on the hypervisor. {{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 760.615017] env[69796]: DEBUG nova.virt.hardware [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 760.615017] env[69796]: DEBUG nova.virt.hardware [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.615017] env[69796]: DEBUG nova.virt.hardware [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 760.615270] env[69796]: DEBUG nova.virt.hardware [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.615362] env[69796]: DEBUG nova.virt.hardware [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 760.615540] env[69796]: DEBUG nova.virt.hardware [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 
tempest-VolumesAdminNegativeTest-161696083-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 760.615762] env[69796]: DEBUG nova.virt.hardware [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 760.615919] env[69796]: DEBUG nova.virt.hardware [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 760.616123] env[69796]: DEBUG nova.virt.hardware [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 760.616295] env[69796]: DEBUG nova.virt.hardware [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 760.616479] env[69796]: DEBUG nova.virt.hardware [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 760.617470] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea8ebcf-0a6a-4aa4-a7da-57d3fdcea8e2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.630205] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22690f55-ce4c-4670-80e7-90ba7fbb2670 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.747039] env[69796]: INFO nova.compute.manager [None req-d66e1594-a81e-4ea2-8e46-a926c246cf0b tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] instance snapshotting [ 760.747710] env[69796]: DEBUG nova.objects.instance [None req-d66e1594-a81e-4ea2-8e46-a926c246cf0b tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lazy-loading 'flavor' on Instance uuid 7f37f6c9-adba-4292-9d47-c455f77e539f {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 760.948063] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=69796) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.948063] env[69796]: DEBUG nova.compute.manager [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Start building networks asynchronously for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 760.954410] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.829s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.954672] env[69796]: DEBUG nova.objects.instance [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Lazy-loading 'resources' on Instance uuid d0e1a7df-f83f-43c2-a387-d2a378ff31b6 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 761.168284] env[69796]: DEBUG oslo_concurrency.lockutils [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Acquiring lock "12b7c520-b21f-48d1-a1fe-6c12dcb713cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.168957] env[69796]: DEBUG oslo_concurrency.lockutils [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Lock "12b7c520-b21f-48d1-a1fe-6c12dcb713cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.257409] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed456815-2e0a-41be-8dd1-b38e4f3bb83c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.279112] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4d98a1-ae8c-474a-9def-75f02094cc4d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.387596] env[69796]: INFO nova.compute.manager [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Rebuilding instance [ 761.444297] env[69796]: DEBUG nova.compute.manager [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 761.447591] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c8fb62-00b6-4858-b116-1fed75b05093 {{(pid=69796) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.468766] env[69796]: DEBUG nova.compute.utils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 761.472328] env[69796]: DEBUG nova.compute.manager [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Allocating IP information in the background. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 761.473789] env[69796]: DEBUG nova.network.neutron [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 761.650690] env[69796]: DEBUG nova.policy [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '67320fe7673a4b1d9d65b3c8c3c8ad4e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6317908a1b7243c090b4db6755634bce', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 761.675993] env[69796]: DEBUG nova.compute.manager [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 761.679528] env[69796]: DEBUG nova.compute.manager [req-11ddaccb-530a-4ad4-b256-d58758a64493 req-bc01e0b9-f158-4dfa-ad57-7cf24d227939 service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Received event network-changed-65b8c00f-8a84-4930-a8b4-c7a8e994421b {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 761.679720] env[69796]: DEBUG nova.compute.manager [req-11ddaccb-530a-4ad4-b256-d58758a64493 req-bc01e0b9-f158-4dfa-ad57-7cf24d227939 service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Refreshing instance network info cache due to event network-changed-65b8c00f-8a84-4930-a8b4-c7a8e994421b. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 761.679933] env[69796]: DEBUG oslo_concurrency.lockutils [req-11ddaccb-530a-4ad4-b256-d58758a64493 req-bc01e0b9-f158-4dfa-ad57-7cf24d227939 service nova] Acquiring lock "refresh_cache-836605ee-50cb-48b0-ba2e-33db3832f8ba" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.680092] env[69796]: DEBUG oslo_concurrency.lockutils [req-11ddaccb-530a-4ad4-b256-d58758a64493 req-bc01e0b9-f158-4dfa-ad57-7cf24d227939 service nova] Acquired lock "refresh_cache-836605ee-50cb-48b0-ba2e-33db3832f8ba" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.684088] env[69796]: DEBUG nova.network.neutron [req-11ddaccb-530a-4ad4-b256-d58758a64493 req-bc01e0b9-f158-4dfa-ad57-7cf24d227939 service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Refreshing network info cache for port 65b8c00f-8a84-4930-a8b4-c7a8e994421b {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 761.789234] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d66567-26dc-4fd1-8d13-c320867c1600 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.794113] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-d66e1594-a81e-4ea2-8e46-a926c246cf0b tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Creating Snapshot of the VM instance {{(pid=69796) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 761.794113] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c59aea42-4013-4bbb-8712-46872bb9a75b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.801316] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc31ecd3-d3c2-49a9-88d1-752a00477b15 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.807325] env[69796]: DEBUG oslo_vmware.api [None req-d66e1594-a81e-4ea2-8e46-a926c246cf0b tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 761.807325] env[69796]: value = "task-4234302" [ 761.807325] env[69796]: _type = "Task" [ 761.807325] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.843533] env[69796]: DEBUG nova.compute.manager [req-983d6e42-a882-40e4-a490-71c6df2d2427 req-50a9849c-fa0b-4810-ba57-e4c3329a5e60 service nova] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Received event network-vif-deleted-6196c31c-6907-4695-91cf-2de0c3cac58f {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 761.844421] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f06ef77-260d-4fc3-9b10-ae3820dabb09 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.850870] env[69796]: DEBUG oslo_vmware.api [None req-d66e1594-a81e-4ea2-8e46-a926c246cf0b tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234302, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.857625] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52a962b-0b43-4f42-b0c4-23f8db2f7dfd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.874242] env[69796]: DEBUG nova.compute.provider_tree [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.980406] env[69796]: DEBUG nova.compute.manager [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Start building block device mappings for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 762.154816] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Acquiring lock "7f37f6c9-adba-4292-9d47-c455f77e539f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.155105] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lock "7f37f6c9-adba-4292-9d47-c455f77e539f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.155317] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Acquiring lock "7f37f6c9-adba-4292-9d47-c455f77e539f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.158387] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lock "7f37f6c9-adba-4292-9d47-c455f77e539f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.158387] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lock "7f37f6c9-adba-4292-9d47-c455f77e539f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.159930] env[69796]: INFO nova.compute.manager [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Terminating instance [ 762.212439] env[69796]: DEBUG oslo_concurrency.lockutils [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.326086] env[69796]: DEBUG oslo_vmware.api [None req-d66e1594-a81e-4ea2-8e46-a926c246cf0b tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234302, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.380179] env[69796]: DEBUG nova.scheduler.client.report [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 762.468141] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 762.468141] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a75ec6c-382b-4359-bee3-3382dabbdd3c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.478773] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Waiting for the task: (returnval){ [ 762.478773] env[69796]: value = "task-4234303" [ 762.478773] env[69796]: _type = "Task" [ 762.478773] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.498686] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234303, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.663959] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Acquiring lock "refresh_cache-7f37f6c9-adba-4292-9d47-c455f77e539f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.663959] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Acquired lock "refresh_cache-7f37f6c9-adba-4292-9d47-c455f77e539f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.664385] env[69796]: DEBUG nova.network.neutron [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 762.824711] env[69796]: DEBUG oslo_vmware.api [None req-d66e1594-a81e-4ea2-8e46-a926c246cf0b tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234302, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.886185] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.931s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.891292] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.308s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.892749] env[69796]: INFO nova.compute.claims [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 762.911298] env[69796]: DEBUG nova.network.neutron [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Successfully updated port: 3c9189a9-01dd-42e1-b2b3-9d0f3f53448e {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 762.929585] env[69796]: INFO nova.scheduler.client.report [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Deleted allocations for instance d0e1a7df-f83f-43c2-a387-d2a378ff31b6 [ 762.993635] env[69796]: DEBUG nova.compute.manager [None 
req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Start spawning the instance on the hypervisor. {{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 762.997264] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234303, 'name': PowerOffVM_Task, 'duration_secs': 0.126321} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.997786] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 762.998032] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 762.998859] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f75622d-e50b-49f5-9f37-e82e16e6ba9e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.012601] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 763.013201] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-459660b1-1f0d-4829-a5d4-fe9fe1a3ca95 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.026020] env[69796]: DEBUG nova.virt.hardware [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:19:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1132328641',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-63978610',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 763.026020] env[69796]: DEBUG nova.virt.hardware [None req-7df59935-5b10-462f-abf8-713668cc501c 
tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.026020] env[69796]: DEBUG nova.virt.hardware [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 763.026295] env[69796]: DEBUG nova.virt.hardware [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.026295] env[69796]: DEBUG nova.virt.hardware [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 763.026295] env[69796]: DEBUG nova.virt.hardware [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 763.026295] env[69796]: DEBUG nova.virt.hardware [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 763.026295] env[69796]: DEBUG nova.virt.hardware [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 763.026447] env[69796]: DEBUG nova.virt.hardware [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 763.027888] env[69796]: DEBUG nova.virt.hardware [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 763.028412] env[69796]: DEBUG nova.virt.hardware [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 763.030056] env[69796]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1435b9-79f6-4108-a532-660c0747450b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.042032] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb13468d-5073-409a-a6ba-0861dadcbb68 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.050226] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 763.050730] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Deleting contents of the VM from datastore datastore1 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 763.051083] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Deleting the datastore file [datastore1] 38792225-b054-4c08-b3ec-51d46287b0f9 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 763.051939] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a157a5b-f403-467a-acc4-b210ee2fb765 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.070269] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Waiting for the task: (returnval){ [ 763.070269] env[69796]: value = "task-4234305" [ 763.070269] env[69796]: _type = "Task" [ 763.070269] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.080124] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.198339] env[69796]: DEBUG nova.network.neutron [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Successfully created port: e84f8d87-5538-4cfd-ac81-c58c1b4fca74 {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 763.201215] env[69796]: DEBUG nova.network.neutron [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.328026] env[69796]: DEBUG oslo_vmware.api [None req-d66e1594-a81e-4ea2-8e46-a926c246cf0b tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234302, 'name': CreateSnapshot_Task, 'duration_secs': 1.172081} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.328026] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-d66e1594-a81e-4ea2-8e46-a926c246cf0b tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Created Snapshot of the VM instance {{(pid=69796) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 763.328026] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f74eac-311b-4af7-b982-ad24c6dff032 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.346643] env[69796]: DEBUG nova.network.neutron [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.418062] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "refresh_cache-9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.418275] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquired lock "refresh_cache-9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.418463] env[69796]: DEBUG nova.network.neutron [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 763.440210] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9b5d8c27-b089-44ed-904e-0d412a946cb7 tempest-DeleteServersAdminTestJSON-518039465 tempest-DeleteServersAdminTestJSON-518039465-project-admin] Lock "d0e1a7df-f83f-43c2-a387-d2a378ff31b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.908s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.585387] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213921} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.585825] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.590130] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Deleted contents of the VM from datastore datastore1 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 763.590130] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 763.757820] env[69796]: DEBUG nova.network.neutron [req-11ddaccb-530a-4ad4-b256-d58758a64493 req-bc01e0b9-f158-4dfa-ad57-7cf24d227939 service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Updated VIF entry in instance network info cache for port 65b8c00f-8a84-4930-a8b4-c7a8e994421b. {{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 763.757820] env[69796]: DEBUG nova.network.neutron [req-11ddaccb-530a-4ad4-b256-d58758a64493 req-bc01e0b9-f158-4dfa-ad57-7cf24d227939 service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Updating instance_info_cache with network_info: [{"id": "65b8c00f-8a84-4930-a8b4-c7a8e994421b", "address": "fa:16:3e:9c:f6:53", "network": {"id": "5cb7def8-e2c6-4b9b-9ea6-674c790c87f1", "bridge": "br-int", "label": "tempest-ServersTestJSON-758945376-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "54a54b9449f347b99d71c1bc2029cbeb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d5970ab5-34b8-4065-bfa6-f568b8f103b7", "external-id": "nsx-vlan-transportzone-418", "segmentation_id": 418, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65b8c00f-8a", "ovs_interfaceid": "65b8c00f-8a84-4930-a8b4-c7a8e994421b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.845882] env[69796]: DEBUG nova.compute.manager [None req-d66e1594-a81e-4ea2-8e46-a926c246cf0b tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Instance disappeared during snapshot {{(pid=69796) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 763.849677] env[69796]: DEBUG oslo_concurrency.lockutils [None 
req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Releasing lock "refresh_cache-7f37f6c9-adba-4292-9d47-c455f77e539f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.850160] env[69796]: DEBUG nova.compute.manager [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Start destroying the instance on the hypervisor. {{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 763.850356] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 763.851296] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b60021-977c-4ff7-8780-bd5cf4caacaa {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.869263] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 763.869627] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bab04837-e7ad-4efd-b3d2-5d4f5a414b25 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.884354] env[69796]: DEBUG oslo_vmware.api [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 763.884354] env[69796]: value = "task-4234306" [ 763.884354] env[69796]: _type = "Task" [ 763.884354] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.900300] env[69796]: DEBUG oslo_vmware.api [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234306, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.019644] env[69796]: DEBUG oslo_concurrency.lockutils [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquiring lock "4f4449ab-939d-4d96-9cd0-419a121575cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.019735] env[69796]: DEBUG oslo_concurrency.lockutils [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "4f4449ab-939d-4d96-9cd0-419a121575cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.035257] env[69796]: DEBUG nova.network.neutron [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.066115] env[69796]: DEBUG nova.compute.manager [None req-d66e1594-a81e-4ea2-8e46-a926c246cf0b tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Found 0 images (rotation: 2) {{(pid=69796) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 764.237857] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6281d845-d247-468d-b932-f8203983c87b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.247134] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b468a4fd-58d2-4364-a20e-92d33c202f13 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.285027] env[69796]: DEBUG oslo_concurrency.lockutils [req-11ddaccb-530a-4ad4-b256-d58758a64493 req-bc01e0b9-f158-4dfa-ad57-7cf24d227939 service nova] Releasing lock "refresh_cache-836605ee-50cb-48b0-ba2e-33db3832f8ba" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.285884] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8ccaac-0db5-49d7-bd16-cfda022c032c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.295697] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0c7ce6-ed50-4c3c-aa4d-d25b749b8efb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.316987] env[69796]: DEBUG nova.compute.provider_tree [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.396563] env[69796]: DEBUG oslo_vmware.api [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234306, 'name': PowerOffVM_Task, 'duration_secs': 0.313778} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.397431] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 764.398272] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 764.399105] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-480aa8b4-6241-4714-845e-71f04e619915 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.431912] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 764.434459] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Deleting contents of the VM from datastore datastore1 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 764.434459] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Deleting the datastore file [datastore1] 7f37f6c9-adba-4292-9d47-c455f77e539f {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.434459] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6de74b12-4223-4ff9-b814-4e0d1e2985b8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.442881] env[69796]: DEBUG oslo_vmware.api [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for the task: (returnval){ [ 764.442881] env[69796]: value = "task-4234308" [ 764.442881] env[69796]: _type = "Task" [ 764.442881] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.457838] env[69796]: DEBUG oslo_vmware.api [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234308, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.523658] env[69796]: DEBUG nova.compute.manager [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 764.638996] env[69796]: DEBUG nova.virt.hardware [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 764.639279] env[69796]: DEBUG nova.virt.hardware [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.639439] env[69796]: DEBUG nova.virt.hardware [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 764.639622] env[69796]: DEBUG nova.virt.hardware [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.639771] env[69796]: DEBUG nova.virt.hardware [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 764.639921] env[69796]: DEBUG nova.virt.hardware [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 764.640147] env[69796]: DEBUG nova.virt.hardware [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 764.640314] env[69796]: DEBUG nova.virt.hardware [None 
req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 764.640913] env[69796]: DEBUG nova.virt.hardware [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 764.640913] env[69796]: DEBUG nova.virt.hardware [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 764.640913] env[69796]: DEBUG nova.virt.hardware [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 764.642497] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70e50bb-a21d-492b-a87a-649922fb3cf9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.654040] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-555e7580-74fa-46b4-996c-da5a0bd86ee0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.659406] env[69796]: DEBUG nova.network.neutron [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Updating instance_info_cache with network_info: [{"id": "3c9189a9-01dd-42e1-b2b3-9d0f3f53448e", "address": "fa:16:3e:f8:3c:db", "network": {"id": "229c13a1-394f-41e3-b02e-fdbf0dcb47d3", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-483928764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56e348a3093e4519b918105e54ffc10b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c9189a9-01", "ovs_interfaceid": "3c9189a9-01dd-42e1-b2b3-9d0f3f53448e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.678769] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 
tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Instance VIF info [] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 764.684878] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 764.685843] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 764.687766] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12bddedd-d23e-4a54-b66a-44c7d59eb2ce {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.708557] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 764.708557] env[69796]: value = "task-4234309" [ 764.708557] env[69796]: _type = "Task" [ 764.708557] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.719461] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234309, 'name': CreateVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.820660] env[69796]: DEBUG nova.scheduler.client.report [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 764.961299] env[69796]: DEBUG oslo_vmware.api [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Task: {'id': task-4234308, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.303978} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.961299] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 764.961299] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Deleted contents of the VM from datastore datastore1 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 764.961646] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 764.961741] env[69796]: INFO nova.compute.manager [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Took 1.11 seconds to destroy the instance on the hypervisor. [ 764.962443] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 764.962724] env[69796]: DEBUG nova.compute.manager [-] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 764.962855] env[69796]: DEBUG nova.network.neutron [-] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 764.997410] env[69796]: DEBUG nova.network.neutron [-] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 765.060948] env[69796]: DEBUG oslo_concurrency.lockutils [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.131931] env[69796]: DEBUG nova.compute.manager [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Received event network-vif-plugged-3c9189a9-01dd-42e1-b2b3-9d0f3f53448e {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 765.132154] env[69796]: DEBUG oslo_concurrency.lockutils [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] Acquiring lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.132358] env[69796]: DEBUG oslo_concurrency.lockutils [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.132622] env[69796]: DEBUG oslo_concurrency.lockutils [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.132693] env[69796]: DEBUG nova.compute.manager [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] No waiting events found dispatching network-vif-plugged-3c9189a9-01dd-42e1-b2b3-9d0f3f53448e {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 765.132852] env[69796]: WARNING nova.compute.manager [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Received unexpected event network-vif-plugged-3c9189a9-01dd-42e1-b2b3-9d0f3f53448e for instance with vm_state building and task_state spawning. [ 765.133020] env[69796]: DEBUG nova.compute.manager [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Received event network-changed-3c9189a9-01dd-42e1-b2b3-9d0f3f53448e {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 765.133225] env[69796]: DEBUG nova.compute.manager [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Refreshing instance network info cache due to event network-changed-3c9189a9-01dd-42e1-b2b3-9d0f3f53448e. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 765.133371] env[69796]: DEBUG oslo_concurrency.lockutils [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] Acquiring lock "refresh_cache-9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.162946] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Releasing lock "refresh_cache-9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.163209] env[69796]: DEBUG nova.compute.manager [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Instance network_info: |[{"id": "3c9189a9-01dd-42e1-b2b3-9d0f3f53448e", "address": "fa:16:3e:f8:3c:db", "network": {"id": "229c13a1-394f-41e3-b02e-fdbf0dcb47d3", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-483928764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56e348a3093e4519b918105e54ffc10b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c9189a9-01", "ovs_interfaceid": "3c9189a9-01dd-42e1-b2b3-9d0f3f53448e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 765.163979] env[69796]: DEBUG oslo_concurrency.lockutils [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] Acquired lock "refresh_cache-9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.163979] env[69796]: DEBUG nova.network.neutron [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Refreshing network info cache for port 3c9189a9-01dd-42e1-b2b3-9d0f3f53448e {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 765.166378] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:3c:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'3c9189a9-01dd-42e1-b2b3-9d0f3f53448e', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 765.176392] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Creating folder: Project (56e348a3093e4519b918105e54ffc10b). Parent ref: group-v837766. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 765.177783] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-95507dd1-403c-4f23-8b6a-3d6f01c5f85d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.190080] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Created folder: Project (56e348a3093e4519b918105e54ffc10b) in parent group-v837766. [ 765.190301] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Creating folder: Instances. Parent ref: group-v837794. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 765.190559] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a314567-ad56-44a2-99e0-e26fe5a93e3f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.204051] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Created folder: Instances in parent group-v837794. [ 765.204486] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 765.204694] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 765.204906] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f0f9c55-168a-453c-9e54-33d71b2827b0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.233661] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234309, 'name': CreateVM_Task, 'duration_secs': 0.324362} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.235299] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 765.235299] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 765.235299] env[69796]: value = "task-4234312" [ 765.235299] env[69796]: _type = "Task" [ 765.235299] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.236168] env[69796]: DEBUG oslo_concurrency.lockutils [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.236168] env[69796]: DEBUG oslo_concurrency.lockutils [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.236290] env[69796]: DEBUG oslo_concurrency.lockutils [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 765.236598] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22a906df-a44f-4e78-8d0e-e62e29bbb398 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.246447] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Waiting for the task: (returnval){ [ 765.246447] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52b0fa33-7edd-f6ce-6ecd-338d738f2fee" [ 765.246447] env[69796]: _type = "Task" [ 765.246447] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.249398] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234312, 'name': CreateVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.261964] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52b0fa33-7edd-f6ce-6ecd-338d738f2fee, 'name': SearchDatastore_Task, 'duration_secs': 0.011431} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.261964] env[69796]: DEBUG oslo_concurrency.lockutils [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.261964] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 765.261964] env[69796]: DEBUG oslo_concurrency.lockutils [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.262232] env[69796]: DEBUG oslo_concurrency.lockutils [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.262232] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 765.262232] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8a73363-1640-47fe-899e-44227d3b8cc5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.273523] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 765.273523] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 765.274338] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce238dc5-f040-4985-a8d5-618d586f6e63 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.282493] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Waiting for the task: (returnval){ [ 765.282493] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f4813e-c114-ec83-a729-d417c467544a" [ 765.282493] env[69796]: _type = "Task" [ 765.282493] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.293213] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f4813e-c114-ec83-a729-d417c467544a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.328529] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.439s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.328997] env[69796]: DEBUG nova.compute.manager [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Start building networks asynchronously for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 765.331910] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.897s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.332908] env[69796]: INFO nova.compute.claims [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.501523] env[69796]: DEBUG nova.network.neutron [-] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.753115] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234312, 'name': CreateVM_Task, 'duration_secs': 0.367732} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.753304] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 765.755424] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.755906] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.756340] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 765.756515] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62360458-3c23-4f87-a4c3-e33914f3d524 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.766950] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ [ 765.766950] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5219ecf1-1222-a7d1-9acc-f718206a3d44" [ 765.766950] env[69796]: _type = "Task" [ 765.766950] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.777481] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5219ecf1-1222-a7d1-9acc-f718206a3d44, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.794059] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f4813e-c114-ec83-a729-d417c467544a, 'name': SearchDatastore_Task, 'duration_secs': 0.012277} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.794955] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c1f6063-6225-4f34-b945-7afc0d7ecfb2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.801954] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Waiting for the task: (returnval){ [ 765.801954] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]529e2966-a53e-520c-7f3c-6bbfdc5434a9" [ 765.801954] env[69796]: _type = "Task" [ 765.801954] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.812565] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]529e2966-a53e-520c-7f3c-6bbfdc5434a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.837677] env[69796]: DEBUG nova.compute.utils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 765.843025] env[69796]: DEBUG nova.compute.manager [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Allocating IP information in the background. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 765.843025] env[69796]: DEBUG nova.network.neutron [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 765.992789] env[69796]: DEBUG nova.policy [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0d1ebdf07ac045188dc9d71dc1120da1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed7b2e738b0045d5981e862f2b1cecc2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 766.003805] env[69796]: INFO nova.compute.manager [-] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Took 1.04 seconds to deallocate network for instance. 
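Illustrative sketch only: the two library patterns that produce most of the entries above are the oslo.concurrency lock accounting ("Acquiring lock ... / acquired ... waited / released ... held") and the oslo.vmware task invocation and polling ("Invoking Folder.CreateVM_Task ... / Task: {...} completed successfully"). The snippet below is an assumption about how such calls are typically made with those libraries, not code taken from this deployment; the host, credentials, and helper names (claim_resources, spawn_guest) are placeholders.

from oslo_concurrency import lockutils
from oslo_vmware import api

# Placeholder vCenter connection values; constructing the session is what
# establishes the API connection that the tasks below are issued against.
session = api.VMwareAPISession(
    'vc.example.test', 'administrator', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

@lockutils.synchronized('compute_resources')
def claim_resources(instance):
    # lockutils serializes callers on the named lock and reports how long the
    # lock was waited on and held, matching the "compute_resources" entries.
    pass

def spawn_guest(vm_folder, config_spec, res_pool):
    # Hypothetical helper: invoke_api() issues the SOAP request ("Invoking
    # Folder.CreateVM_Task ..."), and wait_for_task() polls the returned task
    # object until it finishes ("CreateVM_Task ... completed successfully"),
    # returning the task info whose result is the new VM reference.
    task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                              config=config_spec, pool=res_pool)
    return session.wait_for_task(task).result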
[ 766.282616] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5219ecf1-1222-a7d1-9acc-f718206a3d44, 'name': SearchDatastore_Task, 'duration_secs': 0.011216} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.282788] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.282904] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 766.283696] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.313286] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]529e2966-a53e-520c-7f3c-6bbfdc5434a9, 'name': SearchDatastore_Task, 'duration_secs': 0.011771} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.313577] env[69796]: DEBUG oslo_concurrency.lockutils [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.313904] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 38792225-b054-4c08-b3ec-51d46287b0f9/38792225-b054-4c08-b3ec-51d46287b0f9.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 766.314325] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.314497] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 766.314710] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-218f3b4b-3d81-4b02-9edb-a443c0410274 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.318700] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7e8de19-e078-4d71-b081-aa6cce22c282 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.329751] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Waiting for the task: (returnval){ [ 766.329751] env[69796]: value = "task-4234313" [ 766.329751] env[69796]: _type = "Task" [ 766.329751] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.331890] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 766.331890] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 766.337170] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f562256-cdbb-4d19-b2a3-37d5966130d6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.343029] env[69796]: DEBUG nova.compute.manager [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Start building block device mappings for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 766.353737] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234313, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.353737] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ [ 766.353737] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52ed5bef-8a44-6b4d-f010-9a18780a85a3" [ 766.353737] env[69796]: _type = "Task" [ 766.353737] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.374372] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Acquiring lock "8b103adc-9903-406f-8fd1-e193e00cde11" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.374724] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Lock "8b103adc-9903-406f-8fd1-e193e00cde11" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.376452] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Acquiring lock "8b103adc-9903-406f-8fd1-e193e00cde11-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.376452] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Lock "8b103adc-9903-406f-8fd1-e193e00cde11-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.376452] env[69796]: DEBUG oslo_concurrency.lockutils [None 
req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Lock "8b103adc-9903-406f-8fd1-e193e00cde11-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.377082] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52ed5bef-8a44-6b4d-f010-9a18780a85a3, 'name': SearchDatastore_Task, 'duration_secs': 0.014465} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.378797] env[69796]: INFO nova.compute.manager [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Terminating instance [ 766.384771] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccf43c77-d3f0-4ca0-9cf3-f63142a44571 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.406123] env[69796]: DEBUG nova.network.neutron [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Updated VIF entry in instance network info cache for port 3c9189a9-01dd-42e1-b2b3-9d0f3f53448e. {{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 766.406123] env[69796]: DEBUG nova.network.neutron [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Updating instance_info_cache with network_info: [{"id": "3c9189a9-01dd-42e1-b2b3-9d0f3f53448e", "address": "fa:16:3e:f8:3c:db", "network": {"id": "229c13a1-394f-41e3-b02e-fdbf0dcb47d3", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-483928764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56e348a3093e4519b918105e54ffc10b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c9189a9-01", "ovs_interfaceid": "3c9189a9-01dd-42e1-b2b3-9d0f3f53448e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.407580] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ 
[ 766.407580] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52dbcf2c-4624-2500-3319-e1eca9b6437e" [ 766.407580] env[69796]: _type = "Task" [ 766.407580] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.425315] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52dbcf2c-4624-2500-3319-e1eca9b6437e, 'name': SearchDatastore_Task, 'duration_secs': 0.011619} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.425584] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.425902] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714/9a0e9a08-1176-4f88-bbcd-f0f52d3d7714.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 766.426289] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b89411c-6727-41c9-80d8-c22e920d829d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.440322] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ [ 766.440322] env[69796]: value = "task-4234314" [ 766.440322] env[69796]: _type = "Task" [ 766.440322] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.464560] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234314, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.514464] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.672147] env[69796]: DEBUG nova.network.neutron [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Successfully updated port: e84f8d87-5538-4cfd-ac81-c58c1b4fca74 {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 766.746721] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec8244b-3fb2-4521-8bc8-73724c13f628 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.761785] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5638a469-b9ee-46f6-9f51-589e1a853c8f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.826245] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e9be7d-5d3f-4981-9de7-f11921f7bc79 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.841185] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456bbacf-a68b-431b-9612-026fddd6b241 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.850336] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234313, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.878630] env[69796]: DEBUG nova.compute.provider_tree [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 766.885708] env[69796]: DEBUG nova.compute.manager [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Start destroying the instance on the hypervisor. 
{{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 766.886252] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 766.889719] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a207c99-a678-45e6-92f4-689b64ca1d90 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.903387] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 766.905598] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-102862e4-74dd-4d46-aed5-3ecbf3dc72fe {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.911146] env[69796]: DEBUG oslo_concurrency.lockutils [req-7b186a4c-95ec-4133-96b1-8d5d4753c0f7 req-976371b4-15e4-418e-a6ca-17cb80fcb6fd service nova] Releasing lock "refresh_cache-9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.915217] env[69796]: DEBUG oslo_vmware.api [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Waiting for the task: (returnval){ [ 766.915217] env[69796]: value = "task-4234315" [ 766.915217] env[69796]: _type = "Task" [ 766.915217] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.929142] env[69796]: DEBUG oslo_vmware.api [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234315, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.964066] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234314, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.073548] env[69796]: DEBUG oslo_concurrency.lockutils [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Acquiring lock "21bfeb07-c3d5-402d-84ba-2f22aafd5ae6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.074302] env[69796]: DEBUG oslo_concurrency.lockutils [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Lock "21bfeb07-c3d5-402d-84ba-2f22aafd5ae6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.179245] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Acquiring lock "refresh_cache-d746d66b-32df-4a4d-97bd-82b4ad364461" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.179245] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Acquired lock "refresh_cache-d746d66b-32df-4a4d-97bd-82b4ad364461" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.179545] env[69796]: DEBUG nova.network.neutron [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 767.311040] env[69796]: DEBUG nova.network.neutron [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Successfully created port: 1e57c340-c4d4-45c7-977f-167e94856c47 {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 767.347673] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234313, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.765853} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.348144] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 38792225-b054-4c08-b3ec-51d46287b0f9/38792225-b054-4c08-b3ec-51d46287b0f9.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 767.348274] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 767.348691] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eeac1473-9ea4-4745-8022-75916d2474b3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.359281] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Waiting for the task: (returnval){ [ 767.359281] env[69796]: value = "task-4234316" [ 767.359281] env[69796]: _type = "Task" [ 767.359281] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.377362] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234316, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.381984] env[69796]: DEBUG nova.compute.manager [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Start spawning the instance on the hypervisor. 
{{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 767.385231] env[69796]: DEBUG nova.scheduler.client.report [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 767.393669] env[69796]: DEBUG oslo_concurrency.lockutils [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Acquiring lock "5747cb37-539e-4532-a627-282f965a7dd5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.394260] env[69796]: DEBUG oslo_concurrency.lockutils [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Lock "5747cb37-539e-4532-a627-282f965a7dd5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.425929] env[69796]: DEBUG nova.virt.hardware [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 767.425929] env[69796]: DEBUG nova.virt.hardware [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.425929] env[69796]: DEBUG nova.virt.hardware [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 767.426189] 
env[69796]: DEBUG nova.virt.hardware [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.426189] env[69796]: DEBUG nova.virt.hardware [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 767.426189] env[69796]: DEBUG nova.virt.hardware [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 767.426355] env[69796]: DEBUG nova.virt.hardware [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 767.426685] env[69796]: DEBUG nova.virt.hardware [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 767.426685] env[69796]: DEBUG nova.virt.hardware [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 767.426885] env[69796]: DEBUG nova.virt.hardware [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 767.426997] env[69796]: DEBUG nova.virt.hardware [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 767.428528] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a423be-360e-4212-8ce7-671275143e24 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.436111] env[69796]: DEBUG oslo_vmware.api [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234315, 'name': PowerOffVM_Task, 'duration_secs': 0.286855} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.436872] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 767.437117] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 767.437708] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ce02840-3ecb-42b8-b0d9-bfe36bdd057f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.449988] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a4167e-eb48-47f3-a727-8da62d6267d4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.461599] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234314, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.000732} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.475712] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714/9a0e9a08-1176-4f88-bbcd-f0f52d3d7714.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 767.476143] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 767.478145] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d284eb1d-8c0c-427c-8fdb-42e1ed44c2ac {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.488213] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ [ 767.488213] env[69796]: value = "task-4234318" [ 767.488213] env[69796]: _type = "Task" [ 767.488213] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.498617] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234318, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.527189] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 767.527462] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Deleting contents of the VM from datastore datastore1 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 767.527709] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Deleting the datastore file [datastore1] 8b103adc-9903-406f-8fd1-e193e00cde11 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 767.528092] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5789b148-7b44-4038-ac70-26e36eada315 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.535647] env[69796]: DEBUG oslo_vmware.api [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Waiting for the task: (returnval){ [ 767.535647] env[69796]: value = "task-4234319" [ 767.535647] env[69796]: _type = "Task" [ 767.535647] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.545084] env[69796]: DEBUG oslo_vmware.api [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234319, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.581666] env[69796]: DEBUG nova.compute.manager [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 767.769394] env[69796]: DEBUG nova.network.neutron [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.871809] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234316, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092137} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.873711] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 767.875680] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31d07d11-b887-426e-a9eb-0773c6b7ab63 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.894704] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.895148] env[69796]: DEBUG nova.compute.manager [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Start building networks asynchronously for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 767.906415] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 38792225-b054-4c08-b3ec-51d46287b0f9/38792225-b054-4c08-b3ec-51d46287b0f9.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 767.907016] env[69796]: DEBUG oslo_concurrency.lockutils [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.367s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.907258] env[69796]: DEBUG nova.objects.instance [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lazy-loading 'resources' on Instance uuid 47f223c0-12b0-4eda-ab42-81fe8b95afac {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 767.908424] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a6353c2-32d4-410f-9f06-7c44eb57966b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.930280] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Waiting for the task: (returnval){ [ 767.930280] env[69796]: value = "task-4234320" [ 767.930280] env[69796]: _type = "Task" [ 767.930280] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.944028] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234320, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.000182] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234318, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.156988} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.001201] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 768.001883] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39fda311-7fd0-42a4-925f-46fa71d0e644 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.028738] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714/9a0e9a08-1176-4f88-bbcd-f0f52d3d7714.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 768.029592] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a20219bd-600a-42ba-be77-e3d2f54bd0c4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.056529] env[69796]: DEBUG oslo_vmware.api [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Task: {'id': task-4234319, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178015} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.057924] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 768.058134] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Deleted contents of the VM from datastore datastore1 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 768.058307] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 768.058482] env[69796]: INFO nova.compute.manager [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Took 1.17 seconds to destroy the instance on the hypervisor. 
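Every vCenter operation in this trace (PowerOffVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, DeleteDatastoreFile_Task, ReconfigVM_Task, and so on) follows the same invoke-then-poll pattern: the driver starts an asynchronous *_Task method, logs "Waiting for the task ... to complete", and oslo.vmware polls it until it reports "completed successfully". A minimal sketch of that pattern follows; the helper name is illustrative only, and it assumes an already established oslo_vmware.api.VMwareAPISession plus a VirtualMachine managed-object reference obtained elsewhere, neither of which is taken from this log.

    # Sketch only: illustrates the invoke-and-poll pattern seen in the entries above.
    # `session` is assumed to be an oslo_vmware.api.VMwareAPISession and `vm_ref`
    # a VirtualMachine managed-object reference.
    def power_off_and_wait(session, vm_ref):
        # Starts the asynchronous vSphere task (the "Invoking
        # VirtualMachine.PowerOffVM_Task" lines above).
        task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
        # Blocks while polling task progress until it succeeds or raises, which
        # produces the "progress is N%" / "completed successfully" lines.
        return session.wait_for_task(task)

The same helper shape covers the other task types in this section; only the method name and arguments passed to invoke_api change.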
[ 768.058724] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 768.058970] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ [ 768.058970] env[69796]: value = "task-4234321" [ 768.058970] env[69796]: _type = "Task" [ 768.058970] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.059170] env[69796]: DEBUG nova.compute.manager [-] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 768.059283] env[69796]: DEBUG nova.network.neutron [-] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 768.071364] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234321, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.109829] env[69796]: DEBUG oslo_concurrency.lockutils [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.280344] env[69796]: DEBUG nova.compute.manager [req-bec05d97-8098-47b7-9f7f-253029fa1d3a req-9d3df758-2881-4ff8-921e-9ff1bda32682 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Received event network-vif-plugged-e84f8d87-5538-4cfd-ac81-c58c1b4fca74 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 768.280494] env[69796]: DEBUG oslo_concurrency.lockutils [req-bec05d97-8098-47b7-9f7f-253029fa1d3a req-9d3df758-2881-4ff8-921e-9ff1bda32682 service nova] Acquiring lock "d746d66b-32df-4a4d-97bd-82b4ad364461-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.280840] env[69796]: DEBUG oslo_concurrency.lockutils [req-bec05d97-8098-47b7-9f7f-253029fa1d3a req-9d3df758-2881-4ff8-921e-9ff1bda32682 service nova] Lock "d746d66b-32df-4a4d-97bd-82b4ad364461-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.282200] env[69796]: DEBUG oslo_concurrency.lockutils [req-bec05d97-8098-47b7-9f7f-253029fa1d3a req-9d3df758-2881-4ff8-921e-9ff1bda32682 service nova] Lock "d746d66b-32df-4a4d-97bd-82b4ad364461-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.282522] env[69796]: DEBUG nova.compute.manager [req-bec05d97-8098-47b7-9f7f-253029fa1d3a req-9d3df758-2881-4ff8-921e-9ff1bda32682 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] No waiting events found dispatching network-vif-plugged-e84f8d87-5538-4cfd-ac81-c58c1b4fca74 {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 768.282809] env[69796]: WARNING nova.compute.manager [req-bec05d97-8098-47b7-9f7f-253029fa1d3a req-9d3df758-2881-4ff8-921e-9ff1bda32682 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Received unexpected event network-vif-plugged-e84f8d87-5538-4cfd-ac81-c58c1b4fca74 for instance with vm_state building and task_state spawning. [ 768.410085] env[69796]: DEBUG nova.compute.utils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 768.416366] env[69796]: DEBUG nova.compute.manager [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Allocating IP information in the background. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 768.416697] env[69796]: DEBUG nova.network.neutron [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 768.450526] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234320, 'name': ReconfigVM_Task, 'duration_secs': 0.333783} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.450919] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 38792225-b054-4c08-b3ec-51d46287b0f9/38792225-b054-4c08-b3ec-51d46287b0f9.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 768.451801] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3818730-a096-427a-abe0-f6fdd423ace4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.464064] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Waiting for the task: (returnval){ [ 768.464064] env[69796]: value = "task-4234322" [ 768.464064] env[69796]: _type = "Task" [ 768.464064] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.473592] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234322, 'name': Rename_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.513499] env[69796]: DEBUG nova.policy [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '740cd7ff21db4850be6bbb796a59af08', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd8f0e4ba0d94de18857def9ca4832d7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 768.578025] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234321, 'name': ReconfigVM_Task, 'duration_secs': 0.347713} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.578814] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714/9a0e9a08-1176-4f88-bbcd-f0f52d3d7714.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 768.580155] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da1b25a5-09f9-484c-a875-1fc65040ec9d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.592911] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ [ 768.592911] env[69796]: value = "task-4234323" [ 768.592911] env[69796]: _type = "Task" [ 768.592911] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.611772] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234323, 'name': Rename_Task} progress is 6%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.767150] env[69796]: DEBUG nova.network.neutron [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Updating instance_info_cache with network_info: [{"id": "e84f8d87-5538-4cfd-ac81-c58c1b4fca74", "address": "fa:16:3e:e8:f7:b7", "network": {"id": "f37276b1-9061-44cd-8bdf-392262418d58", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1623894011-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6317908a1b7243c090b4db6755634bce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f8d87-55", "ovs_interfaceid": "e84f8d87-5538-4cfd-ac81-c58c1b4fca74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.813070] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db14451a-2a96-41fb-ab21-8690dd845b37 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.826364] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d218739b-4da1-408f-8616-bdc74c3f6caa {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.871282] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acee0e24-d8f4-43c1-ad47-b8308a54dce5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.882955] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33aaf99c-0fb1-4ac0-89e0-386a8a35ad1f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.900720] env[69796]: DEBUG nova.compute.provider_tree [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 768.921611] 
env[69796]: DEBUG nova.compute.manager [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Start building block device mappings for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 768.975266] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234322, 'name': Rename_Task, 'duration_secs': 0.146689} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.976354] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 768.976521] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-046afabe-66dd-4eb5-831f-6d5463188ab7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.985212] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Waiting for the task: (returnval){ [ 768.985212] env[69796]: value = "task-4234324" [ 768.985212] env[69796]: _type = "Task" [ 768.985212] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.996775] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234324, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.109215] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234323, 'name': Rename_Task} progress is 14%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.276923] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Releasing lock "refresh_cache-d746d66b-32df-4a4d-97bd-82b4ad364461" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.277309] env[69796]: DEBUG nova.compute.manager [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Instance network_info: |[{"id": "e84f8d87-5538-4cfd-ac81-c58c1b4fca74", "address": "fa:16:3e:e8:f7:b7", "network": {"id": "f37276b1-9061-44cd-8bdf-392262418d58", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1623894011-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6317908a1b7243c090b4db6755634bce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f8d87-55", "ovs_interfaceid": "e84f8d87-5538-4cfd-ac81-c58c1b4fca74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 769.277796] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:f7:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3117b312-701b-4439-b197-96b6c5cdca89', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e84f8d87-5538-4cfd-ac81-c58c1b4fca74', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 769.287109] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Creating folder: Project (6317908a1b7243c090b4db6755634bce). Parent ref: group-v837766. 
{{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.287849] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32014f33-773a-4a5b-ba0e-26c0d22453c8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.300214] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Created folder: Project (6317908a1b7243c090b4db6755634bce) in parent group-v837766. [ 769.300609] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Creating folder: Instances. Parent ref: group-v837797. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 769.300905] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dea7a9e7-777b-42b4-8c67-9fbe99216fdc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.311331] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Created folder: Instances in parent group-v837797. [ 769.311598] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 769.311825] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 769.312056] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70689508-5eaa-43e1-9204-9c60668f7930 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.338525] env[69796]: DEBUG nova.network.neutron [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Successfully created port: 55aaf1a4-955a-4984-a881-c49328cd474c {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 769.347211] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 769.347211] env[69796]: value = "task-4234327" [ 769.347211] env[69796]: _type = "Task" [ 769.347211] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.367821] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234327, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.441669] env[69796]: ERROR nova.scheduler.client.report [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [req-f0f733f3-d2b6-4743-b7d9-680ebc1e6b28] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f0f733f3-d2b6-4743-b7d9-680ebc1e6b28"}]} [ 769.442073] env[69796]: DEBUG oslo_concurrency.lockutils [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.535s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.442858] env[69796]: ERROR nova.compute.manager [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
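The 400 body above states the reason the inventory sync fails: the resource tracker reports DISK_GB with max_unit = 0, while placement's inventory schema requires max_unit to be an integer between 1 and 2147483647, so the PUT is rejected and the delete path ends in ResourceProviderSyncFailed (the full traceback follows below). A small stand-alone check can reproduce the rejection using only the schema fragment and the values quoted in the error; this is an illustration, not the placement service's own validation code.

    # Reproduces the validation failure quoted in the 400 response above.
    import jsonschema

    # Constraint quoted in the error detail for an inventory's max_unit field.
    MAX_UNIT_SCHEMA = {"type": "integer", "maximum": 2147483647, "minimum": 1}

    # DISK_GB inventory the compute host tried to sync (values from the log).
    disk_gb = {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 0,
               "step_size": 1, "allocation_ratio": 1.0}

    try:
        jsonschema.validate(disk_gb["max_unit"], MAX_UNIT_SCHEMA)
    except jsonschema.exceptions.ValidationError as exc:
        print(exc.message)  # -> "0 is less than the minimum of 1", as in the log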
[ 769.442858] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Traceback (most recent call last): [ 769.442858] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 769.442858] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] yield [ 769.442858] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 769.442858] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] self.set_inventory_for_provider( [ 769.442858] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 769.442858] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 769.443222] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f0f733f3-d2b6-4743-b7d9-680ebc1e6b28"}]} [ 769.443222] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] [ 769.443222] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] During handling of the above exception, another exception occurred: [ 769.443222] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] [ 769.443222] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Traceback (most recent call last): [ 769.443222] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 769.443222] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] self._delete_instance(context, instance, bdms) [ 769.443222] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 769.443222] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] self._complete_deletion(context, instance) [ 769.443604] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 769.443604] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] self._update_resource_tracker(context, instance) [ 769.443604] env[69796]: ERROR nova.compute.manager [instance: 
47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 769.443604] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] self.rt.update_usage(context, instance, instance.node) [ 769.443604] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 769.443604] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] return f(*args, **kwargs) [ 769.443604] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 769.443604] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] self._update(context.elevated(), self.compute_nodes[nodename]) [ 769.443604] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 769.443604] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] self._update_to_placement(context, compute_node, startup) [ 769.443604] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 769.443604] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 769.444111] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 769.444111] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] return attempt.get(self._wrap_exception) [ 769.444111] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 769.444111] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] six.reraise(self.value[0], self.value[1], self.value[2]) [ 769.444111] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 769.444111] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] raise value [ 769.444111] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 769.444111] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 769.444111] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 769.444111] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] self.reportclient.update_from_provider_tree( [ 769.444111] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 769.444111] 
env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] with catch_all(pd.uuid): [ 769.444111] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 769.444624] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] self.gen.throw(typ, value, traceback) [ 769.444624] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 769.444624] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] raise exception.ResourceProviderSyncFailed() [ 769.444624] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 769.444624] env[69796]: ERROR nova.compute.manager [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] [ 769.449976] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.853s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 769.451211] env[69796]: INFO nova.compute.claims [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.509855] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234324, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.608790] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234323, 'name': Rename_Task, 'duration_secs': 0.934474} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.609738] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 769.609738] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4e7864e-49f3-4567-a02c-5c6c72b2e555 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.617730] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ [ 769.617730] env[69796]: value = "task-4234328" [ 769.617730] env[69796]: _type = "Task" [ 769.617730] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.627074] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234328, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.752164] env[69796]: DEBUG nova.network.neutron [-] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.861320] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234327, 'name': CreateVM_Task, 'duration_secs': 0.399997} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.861488] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 769.862269] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.862474] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.862804] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 769.863253] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09774c12-32c7-436d-bd3c-dd7646732c00 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.869779] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Waiting for the task: (returnval){ [ 769.869779] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52bd6e50-7195-7841-7a82-06e44725aef4" [ 769.869779] env[69796]: _type = "Task" [ 769.869779] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.878417] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52bd6e50-7195-7841-7a82-06e44725aef4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.932704] env[69796]: DEBUG nova.compute.manager [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Start spawning the instance on the hypervisor. 
{{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 769.958498] env[69796]: DEBUG oslo_concurrency.lockutils [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "47f223c0-12b0-4eda-ab42-81fe8b95afac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.183s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.978440] env[69796]: DEBUG nova.virt.hardware [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 769.978840] env[69796]: DEBUG nova.virt.hardware [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 769.978915] env[69796]: DEBUG nova.virt.hardware [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 769.979355] env[69796]: DEBUG nova.virt.hardware [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 769.979926] env[69796]: DEBUG nova.virt.hardware [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 769.979926] env[69796]: DEBUG nova.virt.hardware [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 769.980490] env[69796]: DEBUG nova.virt.hardware [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 769.980490] env[69796]: DEBUG nova.virt.hardware [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 769.980665] env[69796]: DEBUG nova.virt.hardware [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 769.981050] env[69796]: DEBUG nova.virt.hardware [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 769.981345] env[69796]: DEBUG nova.virt.hardware [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 769.983021] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c321da-904e-4417-af6f-03f9ed57787c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.996966] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ba74c7-2424-4036-96d1-8458d1432ed9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.008521] env[69796]: DEBUG oslo_vmware.api [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Task: {'id': task-4234324, 'name': PowerOnVM_Task, 'duration_secs': 0.531243} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.017718] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 770.018045] env[69796]: DEBUG nova.compute.manager [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 770.020017] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f100c39-bebf-4b7d-93df-6e6ad01a6d54 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.131407] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234328, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.258621] env[69796]: INFO nova.compute.manager [-] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Took 2.20 seconds to deallocate network for instance. [ 770.286027] env[69796]: DEBUG nova.network.neutron [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Successfully updated port: 1e57c340-c4d4-45c7-977f-167e94856c47 {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 770.385551] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52bd6e50-7195-7841-7a82-06e44725aef4, 'name': SearchDatastore_Task, 'duration_secs': 0.014424} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.385867] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.386125] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 770.386426] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.386610] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.386835] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 770.389778] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3908e325-76b6-4200-a14a-648c8ec28a87 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
770.406539] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 770.406539] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 770.406539] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cef0ca33-2c5d-4878-9ad3-686fb84fd561 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.413137] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Waiting for the task: (returnval){ [ 770.413137] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52b2fa4b-05b0-70a3-a46a-871d1fc0e683" [ 770.413137] env[69796]: _type = "Task" [ 770.413137] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.424538] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52b2fa4b-05b0-70a3-a46a-871d1fc0e683, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.509664] env[69796]: DEBUG nova.scheduler.client.report [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 770.525426] env[69796]: DEBUG nova.scheduler.client.report [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 770.525730] env[69796]: DEBUG nova.compute.provider_tree [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 770.542243] env[69796]: DEBUG oslo_concurrency.lockutils [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.545397] env[69796]: DEBUG nova.scheduler.client.report [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 770.563853] env[69796]: DEBUG nova.scheduler.client.report [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 770.592036] env[69796]: DEBUG oslo_concurrency.lockutils [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Acquiring lock 
"274e4d87-ec17-4210-a0fb-e226d29ed0d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.592036] env[69796]: DEBUG oslo_concurrency.lockutils [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Lock "274e4d87-ec17-4210-a0fb-e226d29ed0d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.629857] env[69796]: DEBUG oslo_vmware.api [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234328, 'name': PowerOnVM_Task, 'duration_secs': 0.513521} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.632639] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 770.632939] env[69796]: INFO nova.compute.manager [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Took 10.06 seconds to spawn the instance on the hypervisor. 
[ 770.633193] env[69796]: DEBUG nova.compute.manager [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 770.634326] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe40cd3e-89e3-4f12-b3eb-0188cab111a3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.765019] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.788764] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Acquiring lock "refresh_cache-3020e505-513b-4b29-996a-6e70a212f508" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.789883] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Acquired lock "refresh_cache-3020e505-513b-4b29-996a-6e70a212f508" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.790187] env[69796]: DEBUG nova.network.neutron [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 770.913263] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Acquiring lock "74b17bd9-66c4-4a88-b3de-fc5f720f4eca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.913554] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Lock "74b17bd9-66c4-4a88-b3de-fc5f720f4eca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.921230] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ce5abde-8e13-4a26-b094-bbc032f0f1c9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.936687] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fd2aecd8-3af3-4193-bca3-d0c4b7fbb21f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.941255] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52b2fa4b-05b0-70a3-a46a-871d1fc0e683, 'name': SearchDatastore_Task, 'duration_secs': 0.016293} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.942971] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fe9fa71-2c59-4e0d-b6cd-1b2571a536dd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.981028] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76012992-391e-49cc-bc9b-0b8e3261bd42 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.987067] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Waiting for the task: (returnval){ [ 770.987067] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]520cbb76-988e-5700-a3f1-307905be97ab" [ 770.987067] env[69796]: _type = "Task" [ 770.987067] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.997302] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bba4b4-1340-425d-b8f1-34c7881fed5e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.008108] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]520cbb76-988e-5700-a3f1-307905be97ab, 'name': SearchDatastore_Task, 'duration_secs': 0.026976} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.008108] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.008494] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] d746d66b-32df-4a4d-97bd-82b4ad364461/d746d66b-32df-4a4d-97bd-82b4ad364461.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 771.008562] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ca1442b-d766-415a-a4ae-a92b68f55717 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.022028] env[69796]: DEBUG nova.compute.provider_tree [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 771.031086] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Waiting for the task: (returnval){ [ 771.031086] env[69796]: value = "task-4234329" [ 771.031086] env[69796]: _type = "Task" [ 771.031086] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.043524] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234329, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.157317] env[69796]: INFO nova.compute.manager [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Took 23.93 seconds to build instance. 
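The ProviderTree inventory update logged just above carries DISK_GB max_unit=0 for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, which violates the placement inventory schema quoted in the 400 response further down (max_unit must be an integer between 1 and 2147483647) and is what drives the ResourceProviderSyncFailed error and the re-schedule of instance 119768d0-2727-4ef8-b28b-c01cd46fc671. The snippet below is only an illustrative pre-flight check derived from that schema constraint as it appears in the error text; `invalid_max_units` and `MAX_INT32` are invented for the example and are not Nova or placement code.

```python
# Illustrative check mirroring the placement schema constraint quoted in the
# 400 response below: max_unit must be an integer in [1, 2147483647].
# Assumed helper for this example only, not Nova or placement code.

MAX_INT32 = 2147483647

# Inventory as logged in the ProviderTree update above (DISK_GB max_unit is 0).
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 0,     'step_size': 1, 'allocation_ratio': 1.0},
}


def invalid_max_units(inv):
    """Return resource classes whose max_unit falls outside placement's allowed range [1, 2147483647]."""
    return [rc for rc, fields in inv.items()
            if not 1 <= fields['max_unit'] <= MAX_INT32]


print(invalid_max_units(inventory))  # ['DISK_GB'] -> placement rejects the inventory PUT with HTTP 400
```

With DISK_GB flagged this way, the PUT to /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories returns 400, instance_claim fails, and the compute manager re-schedules the build, as the subsequent ERROR traceback and "was re-scheduled" records show.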
[ 771.425127] env[69796]: DEBUG nova.network.neutron [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.482027] env[69796]: DEBUG oslo_concurrency.lockutils [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.536461] env[69796]: DEBUG nova.compute.manager [req-71ad291d-2c71-42fe-bc8e-502ccf8a9ee7 req-09de6f08-a18b-418d-8931-8cd1975ffde3 service nova] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Received event network-vif-plugged-1e57c340-c4d4-45c7-977f-167e94856c47 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 771.536565] env[69796]: DEBUG oslo_concurrency.lockutils [req-71ad291d-2c71-42fe-bc8e-502ccf8a9ee7 req-09de6f08-a18b-418d-8931-8cd1975ffde3 service nova] Acquiring lock "3020e505-513b-4b29-996a-6e70a212f508-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.536735] env[69796]: DEBUG oslo_concurrency.lockutils [req-71ad291d-2c71-42fe-bc8e-502ccf8a9ee7 req-09de6f08-a18b-418d-8931-8cd1975ffde3 service nova] Lock "3020e505-513b-4b29-996a-6e70a212f508-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.537582] env[69796]: DEBUG oslo_concurrency.lockutils [req-71ad291d-2c71-42fe-bc8e-502ccf8a9ee7 req-09de6f08-a18b-418d-8931-8cd1975ffde3 service nova] Lock "3020e505-513b-4b29-996a-6e70a212f508-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.537582] env[69796]: DEBUG nova.compute.manager [req-71ad291d-2c71-42fe-bc8e-502ccf8a9ee7 req-09de6f08-a18b-418d-8931-8cd1975ffde3 service nova] [instance: 3020e505-513b-4b29-996a-6e70a212f508] No waiting events found dispatching network-vif-plugged-1e57c340-c4d4-45c7-977f-167e94856c47 {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 771.537582] env[69796]: WARNING nova.compute.manager [req-71ad291d-2c71-42fe-bc8e-502ccf8a9ee7 req-09de6f08-a18b-418d-8931-8cd1975ffde3 service nova] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Received unexpected event network-vif-plugged-1e57c340-c4d4-45c7-977f-167e94856c47 for instance with vm_state building and task_state spawning. [ 771.554352] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234329, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.572352] env[69796]: ERROR nova.scheduler.client.report [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [req-e73c1328-9566-4f94-a33d-22970ea314d8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e73c1328-9566-4f94-a33d-22970ea314d8"}]} [ 771.572815] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.123s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.575247] env[69796]: ERROR nova.compute.manager [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 771.575247] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Traceback (most recent call last): [ 771.575247] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 771.575247] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] yield [ 771.575247] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 771.575247] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] self.set_inventory_for_provider( [ 771.575247] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 771.575247] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 771.575564] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e73c1328-9566-4f94-a33d-22970ea314d8"}]} [ 771.575564] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] [ 771.575564] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] During handling of the above exception, another exception occurred: [ 771.575564] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] [ 771.575564] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Traceback (most recent call last): [ 771.575564] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 771.575564] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] with self.rt.instance_claim(context, instance, node, allocs, [ 771.575564] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 771.575564] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] return f(*args, **kwargs) [ 771.575858] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 771.575858] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] self._update(elevated, cn) [ 771.575858] env[69796]: ERROR nova.compute.manager [instance: 
119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 771.575858] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] self._update_to_placement(context, compute_node, startup) [ 771.575858] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 771.575858] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 771.575858] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 771.575858] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] return attempt.get(self._wrap_exception) [ 771.575858] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 771.575858] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] six.reraise(self.value[0], self.value[1], self.value[2]) [ 771.575858] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 771.575858] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] raise value [ 771.575858] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 771.576454] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 771.576454] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 771.576454] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] self.reportclient.update_from_provider_tree( [ 771.576454] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 771.576454] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] with catch_all(pd.uuid): [ 771.576454] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 771.576454] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] self.gen.throw(typ, value, traceback) [ 771.576454] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 771.576454] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] raise exception.ResourceProviderSyncFailed() [ 771.576454] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 771.576454] env[69796]: ERROR nova.compute.manager [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] [ 771.580024] env[69796]: DEBUG nova.compute.utils [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 771.580024] env[69796]: DEBUG oslo_concurrency.lockutils [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.854s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.589022] env[69796]: INFO nova.compute.claims [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 771.589647] env[69796]: DEBUG nova.compute.manager [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Build of instance 119768d0-2727-4ef8-b28b-c01cd46fc671 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 771.590287] env[69796]: DEBUG nova.compute.manager [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 771.590625] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Acquiring lock "refresh_cache-119768d0-2727-4ef8-b28b-c01cd46fc671" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.590876] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Acquired lock "refresh_cache-119768d0-2727-4ef8-b28b-c01cd46fc671" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.591146] env[69796]: DEBUG nova.network.neutron [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 771.660111] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b770e4ed-c2e7-4902-bccd-6ace6a8617a1 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.445s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.720561] env[69796]: DEBUG nova.compute.manager [req-eadc58f0-b4a0-4356-adf6-5c9c7cea2614 req-36a3dce5-91d4-4aec-9d5e-c07d405f67c7 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Received event network-changed-e84f8d87-5538-4cfd-ac81-c58c1b4fca74 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 771.720752] env[69796]: DEBUG nova.compute.manager [req-eadc58f0-b4a0-4356-adf6-5c9c7cea2614 req-36a3dce5-91d4-4aec-9d5e-c07d405f67c7 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Refreshing instance network info cache due to event network-changed-e84f8d87-5538-4cfd-ac81-c58c1b4fca74. {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 771.721015] env[69796]: DEBUG oslo_concurrency.lockutils [req-eadc58f0-b4a0-4356-adf6-5c9c7cea2614 req-36a3dce5-91d4-4aec-9d5e-c07d405f67c7 service nova] Acquiring lock "refresh_cache-d746d66b-32df-4a4d-97bd-82b4ad364461" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.721202] env[69796]: DEBUG oslo_concurrency.lockutils [req-eadc58f0-b4a0-4356-adf6-5c9c7cea2614 req-36a3dce5-91d4-4aec-9d5e-c07d405f67c7 service nova] Acquired lock "refresh_cache-d746d66b-32df-4a4d-97bd-82b4ad364461" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.721376] env[69796]: DEBUG nova.network.neutron [req-eadc58f0-b4a0-4356-adf6-5c9c7cea2614 req-36a3dce5-91d4-4aec-9d5e-c07d405f67c7 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Refreshing network info cache for port e84f8d87-5538-4cfd-ac81-c58c1b4fca74 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 772.054457] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234329, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.904356} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.054457] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] d746d66b-32df-4a4d-97bd-82b4ad364461/d746d66b-32df-4a4d-97bd-82b4ad364461.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 772.054457] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 772.054457] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54ba1582-298e-4671-aaec-80e20e033dbc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.061302] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Waiting for the task: (returnval){ [ 772.061302] env[69796]: value = "task-4234330" [ 772.061302] env[69796]: _type = "Task" [ 772.061302] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.077179] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234330, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.122156] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Acquiring lock "2218ece0-5246-451d-9bdc-8fd01cfe6ec3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.122815] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Lock "2218ece0-5246-451d-9bdc-8fd01cfe6ec3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.162184] env[69796]: DEBUG nova.compute.manager [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 772.165824] env[69796]: DEBUG nova.network.neutron [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.210864] env[69796]: DEBUG nova.network.neutron [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Updating instance_info_cache with network_info: [{"id": "1e57c340-c4d4-45c7-977f-167e94856c47", "address": "fa:16:3e:91:0e:63", "network": {"id": "8c762278-1714-4b49-b234-529faa6423cf", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1280233570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "ed7b2e738b0045d5981e862f2b1cecc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e57c340-c4", "ovs_interfaceid": "1e57c340-c4d4-45c7-977f-167e94856c47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.385451] env[69796]: DEBUG nova.network.neutron [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.461501] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquiring lock "38792225-b054-4c08-b3ec-51d46287b0f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.463032] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "38792225-b054-4c08-b3ec-51d46287b0f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.463032] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquiring lock 
"38792225-b054-4c08-b3ec-51d46287b0f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.463032] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "38792225-b054-4c08-b3ec-51d46287b0f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.463431] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "38792225-b054-4c08-b3ec-51d46287b0f9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.467392] env[69796]: INFO nova.compute.manager [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Terminating instance [ 772.578492] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234330, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071636} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.581210] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 772.586027] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d41dc5-1691-465d-baaa-4592cc433392 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.617537] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] d746d66b-32df-4a4d-97bd-82b4ad364461/d746d66b-32df-4a4d-97bd-82b4ad364461.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 772.617537] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ecf05d3-a2bb-4567-a9c0-ce613552b351 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.636377] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Waiting for the task: 
(returnval){ [ 772.636377] env[69796]: value = "task-4234331" [ 772.636377] env[69796]: _type = "Task" [ 772.636377] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.647361] env[69796]: DEBUG nova.scheduler.client.report [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 772.656027] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234331, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.670021] env[69796]: DEBUG nova.scheduler.client.report [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 772.670021] env[69796]: DEBUG nova.compute.provider_tree [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 772.696269] env[69796]: DEBUG oslo_concurrency.lockutils [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.696269] env[69796]: DEBUG nova.scheduler.client.report [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 772.713670] env[69796]: DEBUG nova.network.neutron [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 
f0d4f167-344a-4828-9f6e-8a62ed8e064d] Successfully updated port: 55aaf1a4-955a-4984-a881-c49328cd474c {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 772.713670] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Releasing lock "refresh_cache-3020e505-513b-4b29-996a-6e70a212f508" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.713759] env[69796]: DEBUG nova.compute.manager [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Instance network_info: |[{"id": "1e57c340-c4d4-45c7-977f-167e94856c47", "address": "fa:16:3e:91:0e:63", "network": {"id": "8c762278-1714-4b49-b234-529faa6423cf", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1280233570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "ed7b2e738b0045d5981e862f2b1cecc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e57c340-c4", "ovs_interfaceid": "1e57c340-c4d4-45c7-977f-167e94856c47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 772.713833] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:0e:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a0d5af-5be9-477a-837c-58ef55c717f4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e57c340-c4d4-45c7-977f-167e94856c47', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 772.722322] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Creating folder: Project (ed7b2e738b0045d5981e862f2b1cecc2). Parent ref: group-v837766. 
{{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 772.722698] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3091eae3-c37a-4e4d-8eaf-56856688debb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.735256] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Created folder: Project (ed7b2e738b0045d5981e862f2b1cecc2) in parent group-v837766. [ 772.735435] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Creating folder: Instances. Parent ref: group-v837800. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 772.735723] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b6e8ffc-f940-4619-bafa-f1eec6cf45fe {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.740719] env[69796]: DEBUG nova.scheduler.client.report [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 772.753986] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Created folder: Instances in parent group-v837800. [ 772.754716] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 772.754716] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 772.755430] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2d20052-a752-4daa-8ad4-a2b42828cdc5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.785361] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 772.785361] env[69796]: value = "task-4234334" [ 772.785361] env[69796]: _type = "Task" [ 772.785361] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.794990] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234334, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.894731] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Releasing lock "refresh_cache-119768d0-2727-4ef8-b28b-c01cd46fc671" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.894731] env[69796]: DEBUG nova.compute.manager [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 772.894845] env[69796]: DEBUG nova.compute.manager [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 772.895653] env[69796]: DEBUG nova.network.neutron [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 772.929825] env[69796]: DEBUG nova.network.neutron [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.980069] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquiring lock "refresh_cache-38792225-b054-4c08-b3ec-51d46287b0f9" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.980069] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquired lock "refresh_cache-38792225-b054-4c08-b3ec-51d46287b0f9" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.980303] env[69796]: DEBUG nova.network.neutron [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 772.984877] env[69796]: DEBUG nova.network.neutron [req-eadc58f0-b4a0-4356-adf6-5c9c7cea2614 req-36a3dce5-91d4-4aec-9d5e-c07d405f67c7 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Updated VIF entry in instance network info cache for port e84f8d87-5538-4cfd-ac81-c58c1b4fca74. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 772.984877] env[69796]: DEBUG nova.network.neutron [req-eadc58f0-b4a0-4356-adf6-5c9c7cea2614 req-36a3dce5-91d4-4aec-9d5e-c07d405f67c7 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Updating instance_info_cache with network_info: [{"id": "e84f8d87-5538-4cfd-ac81-c58c1b4fca74", "address": "fa:16:3e:e8:f7:b7", "network": {"id": "f37276b1-9061-44cd-8bdf-392262418d58", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1623894011-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6317908a1b7243c090b4db6755634bce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f8d87-55", "ovs_interfaceid": "e84f8d87-5538-4cfd-ac81-c58c1b4fca74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.155123] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234331, 'name': ReconfigVM_Task, 'duration_secs': 0.410739} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.155427] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Reconfigured VM instance instance-0000000e to attach disk [datastore2] d746d66b-32df-4a4d-97bd-82b4ad364461/d746d66b-32df-4a4d-97bd-82b4ad364461.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 773.156303] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f8a56f1d-70c8-4f82-a87e-47d3b7133ab6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.166596] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Waiting for the task: (returnval){ [ 773.166596] env[69796]: value = "task-4234335" [ 773.166596] env[69796]: _type = "Task" [ 773.166596] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.178246] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234335, 'name': Rename_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.216080] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquiring lock "refresh_cache-f0d4f167-344a-4828-9f6e-8a62ed8e064d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.216345] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquired lock "refresh_cache-f0d4f167-344a-4828-9f6e-8a62ed8e064d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.216574] env[69796]: DEBUG nova.network.neutron [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 773.222715] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501480a6-acfe-490c-af45-f50535b74f34 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.232029] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3d2a9d-11e1-4cd4-bbd4-7be86304a7f0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.267421] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a2092d-4962-4e33-80fd-e5482ba2503c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.276878] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab9f01d-f189-4174-86a2-5401dd011d13 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.296852] env[69796]: DEBUG nova.compute.provider_tree [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 773.308205] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234334, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.434479] env[69796]: DEBUG nova.network.neutron [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.489237] env[69796]: DEBUG oslo_concurrency.lockutils [req-eadc58f0-b4a0-4356-adf6-5c9c7cea2614 req-36a3dce5-91d4-4aec-9d5e-c07d405f67c7 service nova] Releasing lock "refresh_cache-d746d66b-32df-4a4d-97bd-82b4ad364461" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.489237] env[69796]: DEBUG nova.compute.manager [req-eadc58f0-b4a0-4356-adf6-5c9c7cea2614 req-36a3dce5-91d4-4aec-9d5e-c07d405f67c7 service nova] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Received event network-vif-deleted-71d51eb3-e59f-4936-81b5-e8153da0b686 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 773.525681] env[69796]: DEBUG nova.network.neutron [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 773.678665] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234335, 'name': Rename_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.717891] env[69796]: DEBUG nova.network.neutron [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.811759] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234334, 'name': CreateVM_Task, 'duration_secs': 0.556244} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.811759] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 773.811759] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.811759] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.811759] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 773.812560] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-474f71b3-01f8-4230-9eac-1c3ce5ff8ebd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.819617] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Waiting for the task: (returnval){ [ 773.819617] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52acc63e-02a8-191e-336c-16c21c3ddb04" [ 773.819617] env[69796]: _type = "Task" [ 773.819617] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.833775] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52acc63e-02a8-191e-336c-16c21c3ddb04, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.838767] env[69796]: ERROR nova.scheduler.client.report [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [req-f221cf45-9b4a-4bc6-8f56-c653733751f2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f221cf45-9b4a-4bc6-8f56-c653733751f2"}]} [ 773.840727] env[69796]: DEBUG oslo_concurrency.lockutils [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.261s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.840727] env[69796]: ERROR nova.compute.manager [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 773.840727] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Traceback (most recent call last): [ 773.840727] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 773.840727] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] yield [ 773.840727] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 773.840727] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] self.set_inventory_for_provider( [ 773.840727] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 773.840727] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 773.841135] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f221cf45-9b4a-4bc6-8f56-c653733751f2"}]} [ 773.841135] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] [ 773.841135] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] During handling of the above exception, another exception occurred: [ 773.841135] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] [ 773.841135] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Traceback (most recent call last): [ 773.841135] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 773.841135] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] with self.rt.instance_claim(context, instance, node, allocs, [ 773.841135] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 773.841135] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] return f(*args, **kwargs) [ 773.841448] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 773.841448] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] self._update(elevated, cn) [ 773.841448] env[69796]: ERROR nova.compute.manager [instance: 
6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 773.841448] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] self._update_to_placement(context, compute_node, startup) [ 773.841448] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 773.841448] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 773.841448] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 773.841448] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] return attempt.get(self._wrap_exception) [ 773.841448] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 773.841448] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] six.reraise(self.value[0], self.value[1], self.value[2]) [ 773.841448] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 773.841448] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] raise value [ 773.841448] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 773.841939] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 773.841939] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 773.841939] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] self.reportclient.update_from_provider_tree( [ 773.841939] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 773.841939] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] with catch_all(pd.uuid): [ 773.841939] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 773.841939] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] self.gen.throw(typ, value, traceback) [ 773.841939] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 773.841939] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] raise exception.ResourceProviderSyncFailed() [ 773.841939] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 773.841939] env[69796]: ERROR nova.compute.manager [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] [ 773.842294] env[69796]: DEBUG nova.compute.utils [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 773.842841] env[69796]: DEBUG oslo_concurrency.lockutils [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.998s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.847730] env[69796]: INFO nova.compute.claims [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 773.851081] env[69796]: DEBUG nova.compute.manager [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Build of instance 6d0b5852-7b75-4054-9eb8-5af0496d800d was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 773.851081] env[69796]: DEBUG nova.compute.manager [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 773.851081] env[69796]: DEBUG oslo_concurrency.lockutils [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquiring lock "refresh_cache-6d0b5852-7b75-4054-9eb8-5af0496d800d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.851081] env[69796]: DEBUG oslo_concurrency.lockutils [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquired lock "refresh_cache-6d0b5852-7b75-4054-9eb8-5af0496d800d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.851301] env[69796]: DEBUG nova.network.neutron [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 773.935799] env[69796]: INFO nova.compute.manager [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: 119768d0-2727-4ef8-b28b-c01cd46fc671] Took 1.04 seconds to deallocate network for instance. 
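The rejected inventory PUT above can be reproduced in isolation: the resource tracker reported a DISK_GB inventory whose max_unit is 0, and Placement validates inventory payloads against a JSON schema that requires max_unit >= 1, so the request fails with the 400 quoted in the error and the build is re-scheduled. The sketch below is an illustrative reconstruction only, using the jsonschema package and a schema fragment copied from the error text itself (not from Placement's actual source); it is not Nova or Placement code.

    import jsonschema

    # Schema fragment quoted verbatim in the 400 response above; the real
    # Placement schema covers the whole inventory document, this is only the
    # max_unit constraint that fails.
    MAX_UNIT_SCHEMA = {"type": "integer", "maximum": 2147483647, "minimum": 1}

    # DISK_GB inventory as the resource tracker tried to report it in this log.
    disk_gb = {
        "total": 400,
        "reserved": 0,
        "min_unit": 1,
        "max_unit": 0,  # the offending value from the log
        "step_size": 1,
        "allocation_ratio": 1.0,
    }

    try:
        jsonschema.validate(disk_gb["max_unit"], MAX_UNIT_SCHEMA)
    except jsonschema.exceptions.ValidationError as exc:
        # Prints: 0 is less than the minimum of 1
        print(exc.message)

Note that the later inventory refresh in this same log (at 774.921) reports DISK_GB max_unit: 1, so the zero value appears to be transient on this provider; the affected instance 6d0b5852-7b75-4054-9eb8-5af0496d800d is re-scheduled rather than failed permanently.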
[ 774.019457] env[69796]: DEBUG nova.network.neutron [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.186049] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234335, 'name': Rename_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.220741] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Releasing lock "refresh_cache-38792225-b054-4c08-b3ec-51d46287b0f9" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.220741] env[69796]: DEBUG nova.compute.manager [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Start destroying the instance on the hypervisor. {{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 774.220958] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 774.221841] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2a4b66-9d96-4580-a290-00164b455b68 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.230098] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 774.230379] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1072719c-19c0-4536-b3bb-29debcc748d0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.241087] env[69796]: DEBUG oslo_vmware.api [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 774.241087] env[69796]: value = "task-4234336" [ 774.241087] env[69796]: _type = "Task" [ 774.241087] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.252042] env[69796]: DEBUG oslo_vmware.api [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234336, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.331337] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52acc63e-02a8-191e-336c-16c21c3ddb04, 'name': SearchDatastore_Task, 'duration_secs': 0.027421} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.331650] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.331886] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 774.333399] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.333627] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.335262] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.335262] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9493643f-9e00-49fd-9818-b6248268d43a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.359583] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 774.359849] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] 
Folder [datastore2] devstack-image-cache_base created. {{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 774.361226] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c25d6487-7aa5-44ad-a1d4-78675f81c4db {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.371598] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Waiting for the task: (returnval){ [ 774.371598] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]528432af-3a36-e01f-42e1-bc32adfe2cd4" [ 774.371598] env[69796]: _type = "Task" [ 774.371598] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.385171] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]528432af-3a36-e01f-42e1-bc32adfe2cd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.410990] env[69796]: DEBUG nova.network.neutron [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.577463] env[69796]: DEBUG nova.network.neutron [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Updating instance_info_cache with network_info: [{"id": "55aaf1a4-955a-4984-a881-c49328cd474c", "address": "fa:16:3e:a7:81:3d", "network": {"id": "f357be00-d875-4423-956f-1c4e8f1206b1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1161814879-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd8f0e4ba0d94de18857def9ca4832d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4aa1eda7-48b9-4fa2-af0b-94c718313af2", "external-id": "nsx-vlan-transportzone-502", "segmentation_id": 502, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55aaf1a4-95", "ovs_interfaceid": "55aaf1a4-955a-4984-a881-c49328cd474c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.610863] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 
tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Acquiring lock "659ec1bd-2be6-4f40-b513-a907b77f2ebb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.610863] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Lock "659ec1bd-2be6-4f40-b513-a907b77f2ebb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.652896] env[69796]: DEBUG nova.network.neutron [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.685321] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234335, 'name': Rename_Task, 'duration_secs': 1.451678} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.685637] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 774.685818] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07d48cae-bff5-42a4-bb55-23c9c4400557 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.692849] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Waiting for the task: (returnval){ [ 774.692849] env[69796]: value = "task-4234337" [ 774.692849] env[69796]: _type = "Task" [ 774.692849] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.701547] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234337, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.751757] env[69796]: DEBUG oslo_vmware.api [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234336, 'name': PowerOffVM_Task, 'duration_secs': 0.140106} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.752055] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 774.752212] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 774.752607] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ef64c23-20e1-42f3-ac62-de98166e1e0b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.780029] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 774.780029] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 774.780183] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Deleting the datastore file [datastore2] 38792225-b054-4c08-b3ec-51d46287b0f9 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 774.780426] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fde78f12-f3ae-443c-93c0-2c3843439e8d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.788577] env[69796]: DEBUG oslo_vmware.api [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for the task: (returnval){ [ 774.788577] env[69796]: value = "task-4234339" [ 774.788577] env[69796]: _type = "Task" [ 774.788577] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.797862] env[69796]: DEBUG oslo_vmware.api [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234339, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.807141] env[69796]: DEBUG oslo_concurrency.lockutils [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Acquiring lock "b97945f4-5c5b-4a98-adac-8337a7ed9011" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.807382] env[69796]: DEBUG oslo_concurrency.lockutils [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Lock "b97945f4-5c5b-4a98-adac-8337a7ed9011" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.888459] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]528432af-3a36-e01f-42e1-bc32adfe2cd4, 'name': SearchDatastore_Task, 'duration_secs': 0.012896} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.891372] env[69796]: DEBUG nova.scheduler.client.report [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 774.894594] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ef582e7-c0a9-4e73-b465-0bfc998767fb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.903051] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Waiting for the task: (returnval){ [ 774.903051] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]521728b8-d726-7d59-7983-083419e59f9d" [ 774.903051] env[69796]: _type = "Task" [ 774.903051] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.913149] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]521728b8-d726-7d59-7983-083419e59f9d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.920870] env[69796]: DEBUG nova.scheduler.client.report [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 774.921188] env[69796]: DEBUG nova.compute.provider_tree [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 774.928291] env[69796]: DEBUG nova.compute.manager [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Received event network-changed-1e57c340-c4d4-45c7-977f-167e94856c47 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 774.928291] env[69796]: DEBUG nova.compute.manager [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Refreshing instance network info cache due to event network-changed-1e57c340-c4d4-45c7-977f-167e94856c47. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 774.928291] env[69796]: DEBUG oslo_concurrency.lockutils [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] Acquiring lock "refresh_cache-3020e505-513b-4b29-996a-6e70a212f508" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.928291] env[69796]: DEBUG oslo_concurrency.lockutils [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] Acquired lock "refresh_cache-3020e505-513b-4b29-996a-6e70a212f508" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.928620] env[69796]: DEBUG nova.network.neutron [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Refreshing network info cache for port 1e57c340-c4d4-45c7-977f-167e94856c47 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.939479] env[69796]: DEBUG nova.scheduler.client.report [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: ad8607c8-89b4-46c5-a48a-8c71751ba994 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 774.970929] env[69796]: DEBUG nova.scheduler.client.report [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 774.995746] env[69796]: DEBUG nova.compute.manager [req-eb1a2fc7-a85f-4615-b977-9ce525dd4ab6 req-5ac16d61-3529-4e6c-bec9-a22455a9fa67 service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Received event network-changed-3c9189a9-01dd-42e1-b2b3-9d0f3f53448e {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 774.995946] env[69796]: DEBUG nova.compute.manager [req-eb1a2fc7-a85f-4615-b977-9ce525dd4ab6 req-5ac16d61-3529-4e6c-bec9-a22455a9fa67 service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Refreshing instance network info cache due to event network-changed-3c9189a9-01dd-42e1-b2b3-9d0f3f53448e. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 774.996285] env[69796]: DEBUG oslo_concurrency.lockutils [req-eb1a2fc7-a85f-4615-b977-9ce525dd4ab6 req-5ac16d61-3529-4e6c-bec9-a22455a9fa67 service nova] Acquiring lock "refresh_cache-9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.996393] env[69796]: DEBUG oslo_concurrency.lockutils [req-eb1a2fc7-a85f-4615-b977-9ce525dd4ab6 req-5ac16d61-3529-4e6c-bec9-a22455a9fa67 service nova] Acquired lock "refresh_cache-9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.996551] env[69796]: DEBUG nova.network.neutron [req-eb1a2fc7-a85f-4615-b977-9ce525dd4ab6 req-5ac16d61-3529-4e6c-bec9-a22455a9fa67 service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Refreshing network info cache for port 3c9189a9-01dd-42e1-b2b3-9d0f3f53448e {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.998519] env[69796]: INFO nova.scheduler.client.report [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Deleted allocations for instance 119768d0-2727-4ef8-b28b-c01cd46fc671 [ 775.081367] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Releasing lock "refresh_cache-f0d4f167-344a-4828-9f6e-8a62ed8e064d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.081897] env[69796]: DEBUG nova.compute.manager [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Instance network_info: |[{"id": "55aaf1a4-955a-4984-a881-c49328cd474c", "address": "fa:16:3e:a7:81:3d", "network": {"id": "f357be00-d875-4423-956f-1c4e8f1206b1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1161814879-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd8f0e4ba0d94de18857def9ca4832d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4aa1eda7-48b9-4fa2-af0b-94c718313af2", "external-id": "nsx-vlan-transportzone-502", "segmentation_id": 502, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55aaf1a4-95", "ovs_interfaceid": "55aaf1a4-955a-4984-a881-c49328cd474c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 775.082470] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:a7:81:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4aa1eda7-48b9-4fa2-af0b-94c718313af2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '55aaf1a4-955a-4984-a881-c49328cd474c', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 775.091856] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Creating folder: Project (dd8f0e4ba0d94de18857def9ca4832d7). Parent ref: group-v837766. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 775.092298] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1630e149-a44c-40a7-8fc2-c58cf06c2bae {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.111030] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Created folder: Project (dd8f0e4ba0d94de18857def9ca4832d7) in parent group-v837766. [ 775.111030] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Creating folder: Instances. Parent ref: group-v837803. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 775.111030] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-24a03146-a1ec-443c-9ef6-5fca43b8b045 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.120713] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Created folder: Instances in parent group-v837803. [ 775.120976] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 775.121203] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 775.121413] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-452ab8a1-060b-4ce2-963a-c204985202af {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.157323] env[69796]: DEBUG oslo_concurrency.lockutils [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Releasing lock "refresh_cache-6d0b5852-7b75-4054-9eb8-5af0496d800d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.159643] env[69796]: DEBUG nova.compute.manager [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 775.159930] env[69796]: DEBUG nova.compute.manager [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 775.161283] env[69796]: DEBUG nova.network.neutron [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 775.164961] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 775.164961] env[69796]: value = "task-4234342" [ 775.164961] env[69796]: _type = "Task" [ 775.164961] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.178479] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234342, 'name': CreateVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.187611] env[69796]: DEBUG nova.network.neutron [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.209114] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234337, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.302317] env[69796]: DEBUG oslo_vmware.api [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Task: {'id': task-4234339, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.303096] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 775.303351] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 775.303602] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 775.303722] env[69796]: INFO nova.compute.manager [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Took 1.08 seconds to destroy the instance on the hypervisor. [ 775.303991] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 775.304758] env[69796]: DEBUG nova.compute.manager [-] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 775.304758] env[69796]: DEBUG nova.network.neutron [-] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 775.349196] env[69796]: DEBUG nova.network.neutron [-] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.418300] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]521728b8-d726-7d59-7983-083419e59f9d, 'name': SearchDatastore_Task, 'duration_secs': 0.010711} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.418300] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.418625] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 3020e505-513b-4b29-996a-6e70a212f508/3020e505-513b-4b29-996a-6e70a212f508.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 775.419012] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5460ab6d-5f34-401b-9bc7-c066593f3bef {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.435313] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Waiting for the task: (returnval){ [ 775.435313] env[69796]: value = "task-4234343" [ 775.435313] env[69796]: _type = "Task" [ 775.435313] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.447078] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234343, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.486381] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac92fb7-1349-45d4-8c6f-ac7d319516e0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.494325] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8c92fa-9913-483c-ab4a-695aa8988df9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.538366] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4793a5b9-de5d-48a1-852f-addcf02378d0 tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Lock "119768d0-2727-4ef8-b28b-c01cd46fc671" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.255s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.544599] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfd657b-4871-4ce7-ac9a-07497b5fbf08 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.551352] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde075b9-a455-419a-9b07-43d09cece671 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.571388] env[69796]: DEBUG nova.compute.provider_tree [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 775.685811] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234342, 'name': CreateVM_Task, 'duration_secs': 0.383931} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.688529] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 775.689397] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.689504] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.690104] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 775.691136] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f0858ee-15f1-4c01-a755-6d5c2b8ca6ce {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.692454] env[69796]: DEBUG nova.network.neutron [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.698525] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Waiting for the task: (returnval){ [ 775.698525] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52dced90-1dbc-c242-da37-4119262aa3f8" [ 775.698525] env[69796]: _type = "Task" [ 775.698525] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.719466] env[69796]: DEBUG oslo_vmware.api [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234337, 'name': PowerOnVM_Task, 'duration_secs': 0.735252} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.719654] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52dced90-1dbc-c242-da37-4119262aa3f8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.719912] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 775.720122] env[69796]: INFO nova.compute.manager [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Took 12.73 seconds to spawn the instance on the hypervisor. [ 775.721090] env[69796]: DEBUG nova.compute.manager [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 775.721437] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1970378-112c-47a1-a92c-75ec1d5e16f3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.851017] env[69796]: DEBUG nova.network.neutron [-] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.953072] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234343, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.957086] env[69796]: DEBUG nova.network.neutron [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Updated VIF entry in instance network info cache for port 1e57c340-c4d4-45c7-977f-167e94856c47. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 775.957463] env[69796]: DEBUG nova.network.neutron [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Updating instance_info_cache with network_info: [{"id": "1e57c340-c4d4-45c7-977f-167e94856c47", "address": "fa:16:3e:91:0e:63", "network": {"id": "8c762278-1714-4b49-b234-529faa6423cf", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1280233570-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "ed7b2e738b0045d5981e862f2b1cecc2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a0d5af-5be9-477a-837c-58ef55c717f4", "external-id": "nsx-vlan-transportzone-598", "segmentation_id": 598, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e57c340-c4", "ovs_interfaceid": "1e57c340-c4d4-45c7-977f-167e94856c47", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.046210] env[69796]: DEBUG nova.compute.manager [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 776.104577] env[69796]: ERROR nova.scheduler.client.report [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [req-2711ba37-0a25-4dfe-86ca-4c086d940ae0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2711ba37-0a25-4dfe-86ca-4c086d940ae0"}]} [ 776.104952] env[69796]: DEBUG oslo_concurrency.lockutils [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.262s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.105551] env[69796]: ERROR nova.compute.manager [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 776.105551] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Traceback (most recent call last): [ 776.105551] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 776.105551] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] yield [ 776.105551] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 776.105551] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] self.set_inventory_for_provider( [ 776.105551] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 776.105551] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 776.105838] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2711ba37-0a25-4dfe-86ca-4c086d940ae0"}]} [ 776.105838] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] [ 776.105838] env[69796]: ERROR nova.compute.manager [instance: 
fcc5bac9-b312-4d4f-8ffb-828ee110aa60] During handling of the above exception, another exception occurred: [ 776.105838] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] [ 776.105838] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Traceback (most recent call last): [ 776.105838] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 776.105838] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] with self.rt.instance_claim(context, instance, node, allocs, [ 776.105838] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 776.105838] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] return f(*args, **kwargs) [ 776.106137] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 776.106137] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] self._update(elevated, cn) [ 776.106137] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 776.106137] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] self._update_to_placement(context, compute_node, startup) [ 776.106137] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 776.106137] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 776.106137] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 776.106137] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] return attempt.get(self._wrap_exception) [ 776.106137] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 776.106137] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] six.reraise(self.value[0], self.value[1], self.value[2]) [ 776.106137] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 776.106137] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] raise value [ 776.106137] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 776.106483] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 776.106483] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 776.106483] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] self.reportclient.update_from_provider_tree( [ 776.106483] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 776.106483] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] with catch_all(pd.uuid): [ 776.106483] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 776.106483] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] self.gen.throw(typ, value, traceback) [ 776.106483] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 776.106483] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] raise exception.ResourceProviderSyncFailed() [ 776.106483] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 776.106483] env[69796]: ERROR nova.compute.manager [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] [ 776.107474] env[69796]: DEBUG nova.compute.utils [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 776.108981] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.194s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.109206] env[69796]: DEBUG nova.objects.instance [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69796) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 776.112239] env[69796]: DEBUG nova.compute.manager [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Build of instance fcc5bac9-b312-4d4f-8ffb-828ee110aa60 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 776.112731] env[69796]: DEBUG nova.compute.manager [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 776.112976] env[69796]: DEBUG oslo_concurrency.lockutils [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Acquiring lock "refresh_cache-fcc5bac9-b312-4d4f-8ffb-828ee110aa60" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.113159] env[69796]: DEBUG oslo_concurrency.lockutils [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Acquired lock "refresh_cache-fcc5bac9-b312-4d4f-8ffb-828ee110aa60" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.113314] env[69796]: DEBUG nova.network.neutron [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 776.195546] env[69796]: INFO nova.compute.manager [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 6d0b5852-7b75-4054-9eb8-5af0496d800d] Took 1.04 seconds to deallocate network for instance. [ 776.218109] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52dced90-1dbc-c242-da37-4119262aa3f8, 'name': SearchDatastore_Task, 'duration_secs': 0.067098} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.218592] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.218908] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 776.219643] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.219867] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.220508] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 776.221604] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0f43e90-199d-4e1e-84e0-d588b499affa {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.226147] env[69796]: DEBUG nova.network.neutron [req-eb1a2fc7-a85f-4615-b977-9ce525dd4ab6 req-5ac16d61-3529-4e6c-bec9-a22455a9fa67 service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Updated VIF entry in instance network info cache for port 3c9189a9-01dd-42e1-b2b3-9d0f3f53448e. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 776.226147] env[69796]: DEBUG nova.network.neutron [req-eb1a2fc7-a85f-4615-b977-9ce525dd4ab6 req-5ac16d61-3529-4e6c-bec9-a22455a9fa67 service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Updating instance_info_cache with network_info: [{"id": "3c9189a9-01dd-42e1-b2b3-9d0f3f53448e", "address": "fa:16:3e:f8:3c:db", "network": {"id": "229c13a1-394f-41e3-b02e-fdbf0dcb47d3", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-483928764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "56e348a3093e4519b918105e54ffc10b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c9189a9-01", "ovs_interfaceid": "3c9189a9-01dd-42e1-b2b3-9d0f3f53448e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.244140] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 776.244140] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 776.247429] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfc46a84-51c1-48f0-b048-050d12d9b7ad {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.251182] env[69796]: INFO nova.compute.manager [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Took 26.26 seconds to build instance. [ 776.256788] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Waiting for the task: (returnval){ [ 776.256788] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]523b6a28-2cb5-003a-5f33-dbedded97eaa" [ 776.256788] env[69796]: _type = "Task" [ 776.256788] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.273195] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]523b6a28-2cb5-003a-5f33-dbedded97eaa, 'name': SearchDatastore_Task, 'duration_secs': 0.011763} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.273802] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eaadf825-e22d-468e-8df5-7b91512bc757 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.283778] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Waiting for the task: (returnval){ [ 776.283778] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52aa0c3c-4e00-e610-b660-580b9eef07b9" [ 776.283778] env[69796]: _type = "Task" [ 776.283778] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.295023] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52aa0c3c-4e00-e610-b660-580b9eef07b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.355015] env[69796]: INFO nova.compute.manager [-] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Took 1.05 seconds to deallocate network for instance. [ 776.452863] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234343, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.462748] env[69796]: DEBUG oslo_concurrency.lockutils [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] Releasing lock "refresh_cache-3020e505-513b-4b29-996a-6e70a212f508" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.463295] env[69796]: DEBUG nova.compute.manager [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Received event network-vif-plugged-55aaf1a4-955a-4984-a881-c49328cd474c {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 776.466904] env[69796]: DEBUG oslo_concurrency.lockutils [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] Acquiring lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.466904] env[69796]: DEBUG oslo_concurrency.lockutils [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] Lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.466904] env[69796]: DEBUG oslo_concurrency.lockutils [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] Lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.466904] env[69796]: DEBUG nova.compute.manager [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] No waiting events found dispatching network-vif-plugged-55aaf1a4-955a-4984-a881-c49328cd474c {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 776.466904] env[69796]: WARNING nova.compute.manager [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Received unexpected event network-vif-plugged-55aaf1a4-955a-4984-a881-c49328cd474c for instance with vm_state building and task_state spawning. [ 776.467107] env[69796]: DEBUG nova.compute.manager [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Received event network-changed-55aaf1a4-955a-4984-a881-c49328cd474c {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 776.467107] env[69796]: DEBUG nova.compute.manager [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Refreshing instance network info cache due to event network-changed-55aaf1a4-955a-4984-a881-c49328cd474c. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 776.467107] env[69796]: DEBUG oslo_concurrency.lockutils [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] Acquiring lock "refresh_cache-f0d4f167-344a-4828-9f6e-8a62ed8e064d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.467107] env[69796]: DEBUG oslo_concurrency.lockutils [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] Acquired lock "refresh_cache-f0d4f167-344a-4828-9f6e-8a62ed8e064d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.467107] env[69796]: DEBUG nova.network.neutron [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Refreshing network info cache for port 55aaf1a4-955a-4984-a881-c49328cd474c {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.570209] env[69796]: DEBUG oslo_concurrency.lockutils [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.658788] env[69796]: DEBUG nova.network.neutron [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 776.734200] env[69796]: DEBUG oslo_concurrency.lockutils [req-eb1a2fc7-a85f-4615-b977-9ce525dd4ab6 req-5ac16d61-3529-4e6c-bec9-a22455a9fa67 service nova] Releasing lock "refresh_cache-9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.756181] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7df59935-5b10-462f-abf8-713668cc501c tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Lock "d746d66b-32df-4a4d-97bd-82b4ad364461" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.526s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.795168] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52aa0c3c-4e00-e610-b660-580b9eef07b9, 'name': SearchDatastore_Task, 'duration_secs': 0.011864} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.795485] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.795834] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] f0d4f167-344a-4828-9f6e-8a62ed8e064d/f0d4f167-344a-4828-9f6e-8a62ed8e064d.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 776.798324] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a268126b-05f1-4f2c-bfe0-70ec27f79faa {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.804735] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Waiting for the task: (returnval){ [ 776.804735] env[69796]: value = "task-4234344" [ 776.804735] env[69796]: _type = "Task" [ 776.804735] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.819050] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234344, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.854752] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Acquiring lock "46c6028e-1282-4585-bc96-58e0c036b5ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.854752] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Lock "46c6028e-1282-4585-bc96-58e0c036b5ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.863029] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.870579] env[69796]: DEBUG nova.network.neutron [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.956028] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234343, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.133353] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8059c688-4d43-4339-9731-d092beef2c96 tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.024s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.136108] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.477s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.136108] env[69796]: DEBUG nova.objects.instance [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Lazy-loading 'resources' on Instance uuid 47005af8-11fe-498f-9b67-e0316faeeb8f {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 777.240474] env[69796]: INFO nova.scheduler.client.report [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Deleted allocations for instance 6d0b5852-7b75-4054-9eb8-5af0496d800d [ 777.260767] env[69796]: DEBUG nova.compute.manager [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 777.320064] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234344, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.373394] env[69796]: DEBUG oslo_concurrency.lockutils [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Releasing lock "refresh_cache-fcc5bac9-b312-4d4f-8ffb-828ee110aa60" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.373895] env[69796]: DEBUG nova.compute.manager [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 777.374208] env[69796]: DEBUG nova.compute.manager [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 777.374394] env[69796]: DEBUG nova.network.neutron [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 777.428485] env[69796]: DEBUG nova.network.neutron [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 777.457460] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234343, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.582566} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.457825] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 3020e505-513b-4b29-996a-6e70a212f508/3020e505-513b-4b29-996a-6e70a212f508.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 777.460126] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 777.460126] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-45f22b9f-be38-43be-ac63-0bdf709f2d4d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.471778] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Waiting for the task: (returnval){ [ 777.471778] env[69796]: value = "task-4234345" [ 777.471778] env[69796]: _type = "Task" [ 777.471778] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.486066] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234345, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.487815] env[69796]: DEBUG nova.network.neutron [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Updated VIF entry in instance network info cache for port 55aaf1a4-955a-4984-a881-c49328cd474c. {{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 777.488528] env[69796]: DEBUG nova.network.neutron [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Updating instance_info_cache with network_info: [{"id": "55aaf1a4-955a-4984-a881-c49328cd474c", "address": "fa:16:3e:a7:81:3d", "network": {"id": "f357be00-d875-4423-956f-1c4e8f1206b1", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1161814879-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd8f0e4ba0d94de18857def9ca4832d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4aa1eda7-48b9-4fa2-af0b-94c718313af2", "external-id": "nsx-vlan-transportzone-502", "segmentation_id": 502, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55aaf1a4-95", "ovs_interfaceid": "55aaf1a4-955a-4984-a881-c49328cd474c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.665275] env[69796]: DEBUG nova.scheduler.client.report [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 777.699931] env[69796]: DEBUG nova.scheduler.client.report [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} 
[ 777.699931] env[69796]: DEBUG nova.compute.provider_tree [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 777.715806] env[69796]: DEBUG nova.scheduler.client.report [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 777.735924] env[69796]: DEBUG nova.scheduler.client.report [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 777.750970] env[69796]: DEBUG oslo_concurrency.lockutils [None req-92d98a58-b199-4cfa-8ae5-c53491df083f tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "6d0b5852-7b75-4054-9eb8-5af0496d800d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.196s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.794608] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.821529] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234344, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.69764} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.822037] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] f0d4f167-344a-4828-9f6e-8a62ed8e064d/f0d4f167-344a-4828-9f6e-8a62ed8e064d.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 777.822099] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 777.823470] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-72f25bfe-a983-4c8b-9ddd-364b884f14ce {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.833190] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Waiting for the task: (returnval){ [ 777.833190] env[69796]: value = "task-4234346" [ 777.833190] env[69796]: _type = "Task" [ 777.833190] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.842853] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234346, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.932754] env[69796]: DEBUG nova.network.neutron [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.985239] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234345, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.308868} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.985532] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 777.989228] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baeb886c-7ced-4f65-b34b-1d65b89890c4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.994289] env[69796]: DEBUG oslo_concurrency.lockutils [req-1abdb83e-df5c-44e7-9cb0-c77454a7652d req-4eabe8fe-b9ee-41cf-b0f4-c6f2c482bf6c service nova] Releasing lock "refresh_cache-f0d4f167-344a-4828-9f6e-8a62ed8e064d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.017125] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 3020e505-513b-4b29-996a-6e70a212f508/3020e505-513b-4b29-996a-6e70a212f508.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 778.020144] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e88fbddc-217f-4ab7-943f-b7c5492b2d5e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.046846] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Waiting for the task: (returnval){ [ 778.046846] env[69796]: value = "task-4234347" [ 778.046846] env[69796]: _type = "Task" [ 778.046846] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.056597] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234347, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.172338] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce455c31-26d6-427f-a409-25690a94f0d9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.181198] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d09d27-f772-4179-a847-b5139ef1a7f3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.224315] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b010b1-6b5e-48e4-ab3c-a016998f4689 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.236987] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7104dad6-88c0-497f-8ed9-800ed83a86f6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.257451] env[69796]: DEBUG nova.compute.manager [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 778.261527] env[69796]: DEBUG nova.compute.provider_tree [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 778.347258] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234346, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118688} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.350431] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 778.350431] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c375e1-729c-4161-86c9-55626683e3d2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.380125] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] f0d4f167-344a-4828-9f6e-8a62ed8e064d/f0d4f167-344a-4828-9f6e-8a62ed8e064d.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 778.380361] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-409ddd85-5e9e-4dce-9a04-b0f2605b168c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.403204] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Waiting for the task: (returnval){ [ 778.403204] env[69796]: value = "task-4234348" [ 778.403204] env[69796]: _type = "Task" [ 778.403204] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.411721] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Acquiring lock "10c782a6-6507-482e-8671-2278375a68fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.412031] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Lock "10c782a6-6507-482e-8671-2278375a68fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.419152] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234348, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.436551] env[69796]: INFO nova.compute.manager [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: fcc5bac9-b312-4d4f-8ffb-828ee110aa60] Took 1.06 seconds to deallocate network for instance. [ 778.562696] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234347, 'name': ReconfigVM_Task, 'duration_secs': 0.41767} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.562696] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 3020e505-513b-4b29-996a-6e70a212f508/3020e505-513b-4b29-996a-6e70a212f508.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.563718] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74d3f985-b54a-496a-b601-bc24ad5c1563 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.573285] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Waiting for the task: (returnval){ [ 778.573285] env[69796]: value = "task-4234349" [ 778.573285] env[69796]: _type = "Task" [ 778.573285] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.583393] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234349, 'name': Rename_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.800794] env[69796]: ERROR nova.scheduler.client.report [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [req-578fb2e7-0ae8-4b9d-b248-6f3ba564acca] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-578fb2e7-0ae8-4b9d-b248-6f3ba564acca"}]} [ 778.800794] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.666s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.801773] env[69796]: ERROR nova.compute.manager [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 778.801773] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Traceback (most recent call last): [ 778.801773] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 778.801773] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] yield [ 778.801773] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 778.801773] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] self.set_inventory_for_provider( [ 778.801773] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 778.801773] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 778.802043] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-578fb2e7-0ae8-4b9d-b248-6f3ba564acca"}]} [ 778.802043] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] [ 778.802043] env[69796]: ERROR nova.compute.manager 
[instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] During handling of the above exception, another exception occurred: [ 778.802043] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] [ 778.802043] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Traceback (most recent call last): [ 778.802043] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 778.802043] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] self._delete_instance(context, instance, bdms) [ 778.802043] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 778.802043] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] self._complete_deletion(context, instance) [ 778.802338] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 778.802338] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] self._update_resource_tracker(context, instance) [ 778.802338] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 778.802338] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] self.rt.update_usage(context, instance, instance.node) [ 778.802338] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 778.802338] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] return f(*args, **kwargs) [ 778.802338] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 778.802338] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] self._update(context.elevated(), self.compute_nodes[nodename]) [ 778.802338] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 778.802338] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] self._update_to_placement(context, compute_node, startup) [ 778.802338] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 778.802338] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 778.802911] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 778.802911] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] return attempt.get(self._wrap_exception) [ 778.802911] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 778.802911] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 778.802911] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 778.802911] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] raise value [ 778.802911] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 778.802911] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 778.802911] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 778.802911] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] self.reportclient.update_from_provider_tree( [ 778.802911] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 778.802911] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] with catch_all(pd.uuid): [ 778.802911] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 778.803464] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] self.gen.throw(typ, value, traceback) [ 778.803464] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 778.803464] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] raise exception.ResourceProviderSyncFailed() [ 778.803464] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 778.803464] env[69796]: ERROR nova.compute.manager [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] [ 778.809711] env[69796]: DEBUG oslo_concurrency.lockutils [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.597s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.811875] env[69796]: INFO nova.compute.claims [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.824219] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.905570] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Acquiring lock "0067f7d5-8349-473d-b0e8-e396026b2393" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.905570] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Lock "0067f7d5-8349-473d-b0e8-e396026b2393" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.923034] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234348, 'name': ReconfigVM_Task, 'duration_secs': 0.300355} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.923034] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Reconfigured VM instance instance-00000010 to attach disk [datastore2] f0d4f167-344a-4828-9f6e-8a62ed8e064d/f0d4f167-344a-4828-9f6e-8a62ed8e064d.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.923034] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8ecba2a-a46d-4010-8381-39dc49010916 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.934234] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Waiting for the task: (returnval){ [ 778.934234] env[69796]: value = "task-4234350" [ 778.934234] env[69796]: _type = "Task" [ 778.934234] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.950103] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234350, 'name': Rename_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.090028] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234349, 'name': Rename_Task, 'duration_secs': 0.160461} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.090028] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 779.090028] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-71bea85c-2049-4be1-a5ea-dcf7037db078 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.097263] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Waiting for the task: (returnval){ [ 779.097263] env[69796]: value = "task-4234351" [ 779.097263] env[69796]: _type = "Task" [ 779.097263] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.107685] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234351, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.324543] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Lock "47005af8-11fe-498f-9b67-e0316faeeb8f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.432s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.443502] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234350, 'name': Rename_Task, 'duration_secs': 0.150632} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.445637] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 779.445637] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c3c7431-f205-4f9a-81e9-daafca9bff8a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.455102] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Waiting for the task: (returnval){ [ 779.455102] env[69796]: value = "task-4234352" [ 779.455102] env[69796]: _type = "Task" [ 779.455102] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.468519] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234352, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.483486] env[69796]: INFO nova.scheduler.client.report [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Deleted allocations for instance fcc5bac9-b312-4d4f-8ffb-828ee110aa60 [ 779.610990] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234351, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.864615] env[69796]: DEBUG nova.scheduler.client.report [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 779.890917] env[69796]: DEBUG nova.scheduler.client.report [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 779.891875] env[69796]: DEBUG nova.compute.provider_tree [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 779.916410] env[69796]: DEBUG nova.scheduler.client.report [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 779.944291] env[69796]: DEBUG nova.scheduler.client.report [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 779.976381] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234352, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.996245] env[69796]: DEBUG oslo_concurrency.lockutils [None req-73c9e92c-25df-4903-9476-a439ac1ff525 tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Lock "fcc5bac9-b312-4d4f-8ffb-828ee110aa60" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.178s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.114137] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234351, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.455215] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786c979b-1e82-4d8a-b319-a1c4a49a059a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.478374] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818a500d-86aa-4db7-8351-b342de0a916c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.484996] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234352, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.520073] env[69796]: DEBUG nova.compute.manager [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 780.524842] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58314c58-c73a-4cce-bfc9-c02c3593e3b2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.539656] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f343eb35-68ed-44da-9fce-d1d021c122bc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.561823] env[69796]: DEBUG nova.compute.provider_tree [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 780.569196] env[69796]: DEBUG oslo_concurrency.lockutils [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Acquiring lock "df6e15b9-640f-40c2-a146-4361de14f8b0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.569196] env[69796]: DEBUG oslo_concurrency.lockutils [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Lock "df6e15b9-640f-40c2-a146-4361de14f8b0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.611817] env[69796]: DEBUG oslo_vmware.api [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234351, 'name': PowerOnVM_Task, 'duration_secs': 1.492584} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.612555] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 780.612555] env[69796]: INFO nova.compute.manager [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Took 13.23 seconds to spawn the instance on the hypervisor. 
[ 780.612555] env[69796]: DEBUG nova.compute.manager [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 780.613403] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa51651-af00-4188-b866-bed1e2edf158 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.849487] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.972253] env[69796]: DEBUG oslo_vmware.api [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234352, 'name': PowerOnVM_Task, 'duration_secs': 1.124245} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.976030] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 780.976030] env[69796]: INFO nova.compute.manager [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Took 11.04 seconds to spawn the instance on the hypervisor. 
[ 780.976030] env[69796]: DEBUG nova.compute.manager [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 780.976030] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c4ba16-9923-4d96-8ce6-4e1a84cb73fe {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.054498] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.091222] env[69796]: ERROR nova.scheduler.client.report [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [req-c031cd69-804c-4838-9d7e-fea1852300ae] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c031cd69-804c-4838-9d7e-fea1852300ae"}]} [ 781.091786] env[69796]: DEBUG oslo_concurrency.lockutils [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.282s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.093490] env[69796]: ERROR nova.compute.manager [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 781.093490] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Traceback (most recent call last): [ 781.093490] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 781.093490] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] yield [ 781.093490] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 781.093490] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] self.set_inventory_for_provider( [ 781.093490] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 781.093490] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 781.093868] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c031cd69-804c-4838-9d7e-fea1852300ae"}]} [ 781.093868] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] [ 781.093868] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] During handling of the above exception, another exception occurred: [ 781.093868] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] [ 781.093868] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Traceback (most recent call last): [ 781.093868] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 781.093868] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] with self.rt.instance_claim(context, instance, node, allocs, [ 781.093868] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 781.093868] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] return f(*args, **kwargs) [ 781.094495] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 781.094495] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] self._update(elevated, cn) [ 781.094495] env[69796]: ERROR nova.compute.manager [instance: 
12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 781.094495] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] self._update_to_placement(context, compute_node, startup) [ 781.094495] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 781.094495] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 781.094495] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 781.094495] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] return attempt.get(self._wrap_exception) [ 781.094495] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 781.094495] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] six.reraise(self.value[0], self.value[1], self.value[2]) [ 781.094495] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 781.094495] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] raise value [ 781.094495] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 781.095016] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 781.095016] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 781.095016] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] self.reportclient.update_from_provider_tree( [ 781.095016] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 781.095016] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] with catch_all(pd.uuid): [ 781.095016] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 781.095016] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] self.gen.throw(typ, value, traceback) [ 781.095016] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 781.095016] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] raise exception.ResourceProviderSyncFailed() [ 781.095016] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 781.095016] env[69796]: ERROR nova.compute.manager [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] [ 781.095506] env[69796]: DEBUG nova.compute.utils [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 781.095506] env[69796]: DEBUG oslo_concurrency.lockutils [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.034s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.097448] env[69796]: INFO nova.compute.claims [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 781.101214] env[69796]: DEBUG nova.compute.manager [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Build of instance 12b7c520-b21f-48d1-a1fe-6c12dcb713cb was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 781.101384] env[69796]: DEBUG nova.compute.manager [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 781.101500] env[69796]: DEBUG oslo_concurrency.lockutils [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Acquiring lock "refresh_cache-12b7c520-b21f-48d1-a1fe-6c12dcb713cb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.101642] env[69796]: DEBUG oslo_concurrency.lockutils [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Acquired lock "refresh_cache-12b7c520-b21f-48d1-a1fe-6c12dcb713cb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.101805] env[69796]: DEBUG nova.network.neutron [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 781.133759] env[69796]: INFO nova.compute.manager [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 
3020e505-513b-4b29-996a-6e70a212f508] Took 26.59 seconds to build instance. [ 781.352531] env[69796]: DEBUG nova.compute.manager [req-6106abdf-1883-49eb-b8c1-6e31635ef747 req-47675745-0e66-4be6-bc1b-c75268b6a2c9 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Received event network-changed-e84f8d87-5538-4cfd-ac81-c58c1b4fca74 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 781.352776] env[69796]: DEBUG nova.compute.manager [req-6106abdf-1883-49eb-b8c1-6e31635ef747 req-47675745-0e66-4be6-bc1b-c75268b6a2c9 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Refreshing instance network info cache due to event network-changed-e84f8d87-5538-4cfd-ac81-c58c1b4fca74. {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 781.352997] env[69796]: DEBUG oslo_concurrency.lockutils [req-6106abdf-1883-49eb-b8c1-6e31635ef747 req-47675745-0e66-4be6-bc1b-c75268b6a2c9 service nova] Acquiring lock "refresh_cache-d746d66b-32df-4a4d-97bd-82b4ad364461" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.353150] env[69796]: DEBUG oslo_concurrency.lockutils [req-6106abdf-1883-49eb-b8c1-6e31635ef747 req-47675745-0e66-4be6-bc1b-c75268b6a2c9 service nova] Acquired lock "refresh_cache-d746d66b-32df-4a4d-97bd-82b4ad364461" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.353307] env[69796]: DEBUG nova.network.neutron [req-6106abdf-1883-49eb-b8c1-6e31635ef747 req-47675745-0e66-4be6-bc1b-c75268b6a2c9 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Refreshing network info cache for port e84f8d87-5538-4cfd-ac81-c58c1b4fca74 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 781.498435] env[69796]: INFO nova.compute.manager [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Took 26.09 seconds to build instance. [ 781.637635] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a5ddd6ff-091e-4a0c-891b-191511849874 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Lock "3020e505-513b-4b29-996a-6e70a212f508" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.338s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.643828] env[69796]: DEBUG nova.network.neutron [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 781.793374] env[69796]: DEBUG nova.network.neutron [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.979551] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "f92627d1-b895-4564-b975-2a596b6dd814" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.979921] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "f92627d1-b895-4564-b975-2a596b6dd814" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.001581] env[69796]: DEBUG oslo_concurrency.lockutils [None req-06c8a32f-607f-4cae-8db7-086b18da9e13 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.164s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.137676] env[69796]: DEBUG nova.compute.manager [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 782.151115] env[69796]: DEBUG nova.scheduler.client.report [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 782.187364] env[69796]: DEBUG nova.scheduler.client.report [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 782.187364] env[69796]: DEBUG nova.compute.provider_tree [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 782.207881] env[69796]: DEBUG nova.scheduler.client.report [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 782.237091] env[69796]: DEBUG nova.scheduler.client.report [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 782.297198] env[69796]: DEBUG oslo_concurrency.lockutils [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Releasing lock "refresh_cache-12b7c520-b21f-48d1-a1fe-6c12dcb713cb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.297198] env[69796]: DEBUG nova.compute.manager [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Virt 
driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 782.297672] env[69796]: DEBUG nova.compute.manager [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 782.298906] env[69796]: DEBUG nova.network.neutron [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 782.341069] env[69796]: DEBUG nova.network.neutron [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 782.506475] env[69796]: DEBUG nova.compute.manager [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 782.667703] env[69796]: DEBUG oslo_concurrency.lockutils [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.699565] env[69796]: DEBUG nova.network.neutron [req-6106abdf-1883-49eb-b8c1-6e31635ef747 req-47675745-0e66-4be6-bc1b-c75268b6a2c9 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Updated VIF entry in instance network info cache for port e84f8d87-5538-4cfd-ac81-c58c1b4fca74. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 782.700213] env[69796]: DEBUG nova.network.neutron [req-6106abdf-1883-49eb-b8c1-6e31635ef747 req-47675745-0e66-4be6-bc1b-c75268b6a2c9 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Updating instance_info_cache with network_info: [{"id": "e84f8d87-5538-4cfd-ac81-c58c1b4fca74", "address": "fa:16:3e:e8:f7:b7", "network": {"id": "f37276b1-9061-44cd-8bdf-392262418d58", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1623894011-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6317908a1b7243c090b4db6755634bce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3117b312-701b-4439-b197-96b6c5cdca89", "external-id": "nsx-vlan-transportzone-94", "segmentation_id": 94, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape84f8d87-55", "ovs_interfaceid": "e84f8d87-5538-4cfd-ac81-c58c1b4fca74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.760925] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea430ef2-61bb-4d6c-91cc-91aa78043ac3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.779294] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced52519-c974-480b-8534-62f7be055814 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.817611] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68aae293-d008-4603-ac2c-ad8fd8fc4138 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.827464] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17aba6d5-30d0-43bd-82a8-53eae98966fd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.843737] env[69796]: DEBUG nova.compute.provider_tree [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 782.845394] 
env[69796]: DEBUG nova.network.neutron [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.033326] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.072607] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquiring lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.072607] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.072607] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquiring lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.072607] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.072765] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.076125] env[69796]: INFO nova.compute.manager [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Terminating instance [ 783.205122] env[69796]: DEBUG oslo_concurrency.lockutils [req-6106abdf-1883-49eb-b8c1-6e31635ef747 req-47675745-0e66-4be6-bc1b-c75268b6a2c9 service nova] Releasing lock "refresh_cache-d746d66b-32df-4a4d-97bd-82b4ad364461" 
{{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.279630] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Acquiring lock "3020e505-513b-4b29-996a-6e70a212f508" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.279630] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Lock "3020e505-513b-4b29-996a-6e70a212f508" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.279630] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Acquiring lock "3020e505-513b-4b29-996a-6e70a212f508-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.279630] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Lock "3020e505-513b-4b29-996a-6e70a212f508-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.279962] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Lock "3020e505-513b-4b29-996a-6e70a212f508-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.283867] env[69796]: INFO nova.compute.manager [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Terminating instance [ 783.353762] env[69796]: INFO nova.compute.manager [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] [instance: 12b7c520-b21f-48d1-a1fe-6c12dcb713cb] Took 1.06 seconds to deallocate network for instance. 
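The ERROR from nova.scheduler.client.report that follows is the same failure already hit for instance 12b7c520-b21f-48d1-a1fe-6c12dcb713cb: the compute node reports a DISK_GB inventory with max_unit 0, and placement rejects the PUT to /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories because the inventory schema requires max_unit >= 1. Note that the refresh at 782.187364 pulls max_unit 1 back from placement, while the provider tree built locally at 782.843737 carries max_unit 0, so every subsequent claim repeats the same rejected update. Below is a minimal sketch of the schema check, assuming the jsonschema library and using only the constraint quoted verbatim in the 400 detail; it is not placement's actual validation code.

    # Minimal sketch: reproduce the max_unit check quoted in the 400 response.
    # The schema fragment is transcribed from the error detail above; field
    # names and limits come from the log, everything else is illustrative.
    import jsonschema

    MAX_UNIT_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {
                                "type": "integer",
                                "maximum": 2147483647,
                                "minimum": 1,
                            },
                        },
                    },
                },
            },
        },
    }

    # The DISK_GB inventory the resource tracker tried to PUT (values from the log):
    payload = {
        "inventories": {
            "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                        "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0},
        },
    }

    try:
        jsonschema.validate(payload, MAX_UNIT_SCHEMA)
    except jsonschema.ValidationError as exc:
        # Mirrors the "0 is less than the minimum of 1" detail in the 400 body.
        print(exc.message)

The practical reading, assuming DISK_GB max_unit tracks the largest single allocation the driver believes a datastore can hold, is that the host is advertising 400 GB total but zero allocatable gigabytes per request, which is what makes the inventory invalid.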
[ 783.389512] env[69796]: ERROR nova.scheduler.client.report [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [req-c13d7269-7ace-4a1a-b5fc-cd7a19f939b1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c13d7269-7ace-4a1a-b5fc-cd7a19f939b1"}]} [ 783.390755] env[69796]: DEBUG oslo_concurrency.lockutils [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.295s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.390755] env[69796]: ERROR nova.compute.manager [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 783.390755] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Traceback (most recent call last): [ 783.390755] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 783.390755] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] yield [ 783.390755] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 783.390755] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] self.set_inventory_for_provider( [ 783.390755] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 783.390755] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 783.391125] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c13d7269-7ace-4a1a-b5fc-cd7a19f939b1"}]} [ 783.391125] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] [ 783.391125] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] During handling of the above exception, another exception occurred: [ 783.391125] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] [ 783.391125] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Traceback (most recent call last): [ 783.391125] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 783.391125] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] with self.rt.instance_claim(context, instance, node, allocs, [ 783.391125] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 783.391125] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] return f(*args, **kwargs) [ 783.391459] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 783.391459] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] self._update(elevated, cn) [ 783.391459] env[69796]: ERROR nova.compute.manager [instance: 
4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 783.391459] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] self._update_to_placement(context, compute_node, startup) [ 783.391459] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 783.391459] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 783.391459] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 783.391459] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] return attempt.get(self._wrap_exception) [ 783.391459] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 783.391459] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] six.reraise(self.value[0], self.value[1], self.value[2]) [ 783.391459] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 783.391459] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] raise value [ 783.391459] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 783.391907] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 783.391907] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 783.391907] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] self.reportclient.update_from_provider_tree( [ 783.391907] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 783.391907] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] with catch_all(pd.uuid): [ 783.391907] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 783.391907] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] self.gen.throw(typ, value, traceback) [ 783.391907] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 783.391907] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] raise exception.ResourceProviderSyncFailed() [ 783.391907] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 783.391907] env[69796]: ERROR nova.compute.manager [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] [ 783.392245] env[69796]: DEBUG nova.compute.utils [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 783.396944] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.880s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.396944] env[69796]: DEBUG nova.objects.instance [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lazy-loading 'resources' on Instance uuid 7f37f6c9-adba-4292-9d47-c455f77e539f {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 783.399882] env[69796]: DEBUG nova.compute.manager [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Build of instance 4f4449ab-939d-4d96-9cd0-419a121575cb was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 783.399882] env[69796]: DEBUG nova.compute.manager [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 783.400044] env[69796]: DEBUG oslo_concurrency.lockutils [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquiring lock "refresh_cache-4f4449ab-939d-4d96-9cd0-419a121575cb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.400143] env[69796]: DEBUG oslo_concurrency.lockutils [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquired lock "refresh_cache-4f4449ab-939d-4d96-9cd0-419a121575cb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.400219] env[69796]: DEBUG nova.network.neutron [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 783.583418] env[69796]: DEBUG nova.compute.manager [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 
tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Start destroying the instance on the hypervisor. {{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 783.583418] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 783.583418] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e602823a-6a03-460f-ac3b-a69b63ba510f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.597383] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 783.598312] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-96e4a6f1-6d97-4cc7-96d8-90508aeb9459 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.609320] env[69796]: DEBUG oslo_vmware.api [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Waiting for the task: (returnval){ [ 783.609320] env[69796]: value = "task-4234353" [ 783.609320] env[69796]: _type = "Task" [ 783.609320] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.622118] env[69796]: DEBUG oslo_vmware.api [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234353, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.793810] env[69796]: DEBUG nova.compute.manager [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Start destroying the instance on the hypervisor. 
{{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 783.793810] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 783.794382] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75691b36-0adf-445c-9bc7-b97dc1c1c4da {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.805459] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 783.806295] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-867aa405-ee32-4b0c-8492-ecb982df4068 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.814860] env[69796]: DEBUG oslo_vmware.api [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Waiting for the task: (returnval){ [ 783.814860] env[69796]: value = "task-4234354" [ 783.814860] env[69796]: _type = "Task" [ 783.814860] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.829608] env[69796]: DEBUG oslo_vmware.api [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234354, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.957367] env[69796]: DEBUG nova.scheduler.client.report [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 783.982911] env[69796]: DEBUG nova.scheduler.client.report [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 783.982911] env[69796]: DEBUG nova.compute.provider_tree [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 784.005050] env[69796]: DEBUG nova.network.neutron [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.010651] env[69796]: DEBUG nova.scheduler.client.report [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 784.038329] env[69796]: DEBUG nova.scheduler.client.report [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 784.127265] env[69796]: DEBUG oslo_vmware.api [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234353, 'name': PowerOffVM_Task, 'duration_secs': 0.472227} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.130406] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 784.131077] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 784.133591] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f84698f3-0f7c-43c2-8f68-e50deca0ee49 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.143476] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Acquiring lock "55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.143476] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Lock "55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.212982] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: 
f0d4f167-344a-4828-9f6e-8a62ed8e064d] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 784.214370] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 784.214618] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Deleting the datastore file [datastore2] f0d4f167-344a-4828-9f6e-8a62ed8e064d {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 784.215384] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c28a2a8e-1b20-42b8-a2a9-6664f2cf8801 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.224423] env[69796]: DEBUG oslo_vmware.api [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Waiting for the task: (returnval){ [ 784.224423] env[69796]: value = "task-4234356" [ 784.224423] env[69796]: _type = "Task" [ 784.224423] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.244169] env[69796]: DEBUG oslo_vmware.api [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234356, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.329122] env[69796]: DEBUG oslo_vmware.api [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234354, 'name': PowerOffVM_Task, 'duration_secs': 0.272358} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.329397] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 784.329598] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 784.329804] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f10f34df-880d-4dad-94ea-5e1e171767a5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.363037] env[69796]: DEBUG nova.network.neutron [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.413983] env[69796]: INFO nova.scheduler.client.report [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Deleted allocations for instance 12b7c520-b21f-48d1-a1fe-6c12dcb713cb [ 784.424316] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 784.424717] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 784.424924] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Deleting the datastore file [datastore2] 3020e505-513b-4b29-996a-6e70a212f508 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 784.428818] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e598a832-d85a-4ec2-8086-d5a4ba8229d9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.445043] env[69796]: DEBUG oslo_vmware.api [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Waiting for the task: (returnval){ [ 784.445043] env[69796]: value = "task-4234358" [ 784.445043] env[69796]: _type = 
"Task" [ 784.445043] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.459769] env[69796]: DEBUG oslo_vmware.api [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234358, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.634435] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55010e28-32f1-46f7-840f-b605ac73a5fa {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.643690] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0177de9-c65b-4d54-b612-66a9a8088400 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.678034] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e08151a-2693-4c97-8cce-305451d4eeaf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.686631] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccb3468-19ed-4745-9f04-db9618666b31 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.702184] env[69796]: DEBUG nova.compute.provider_tree [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 784.739533] env[69796]: DEBUG oslo_vmware.api [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Task: {'id': task-4234356, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177645} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.739715] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 784.740082] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 784.740082] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 784.741843] env[69796]: INFO nova.compute.manager [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 784.741843] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 784.741843] env[69796]: DEBUG nova.compute.manager [-] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 784.741843] env[69796]: DEBUG nova.network.neutron [-] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 784.871708] env[69796]: DEBUG oslo_concurrency.lockutils [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Releasing lock "refresh_cache-4f4449ab-939d-4d96-9cd0-419a121575cb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.871986] env[69796]: DEBUG nova.compute.manager [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 784.871986] env[69796]: DEBUG nova.compute.manager [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 784.873452] env[69796]: DEBUG nova.network.neutron [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 784.940814] env[69796]: DEBUG nova.network.neutron [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.942311] env[69796]: DEBUG oslo_concurrency.lockutils [None req-db3e51c7-36d8-4a01-ae2d-a41d2c1cff89 tempest-ServersTestManualDisk-1605649959 tempest-ServersTestManualDisk-1605649959-project-member] Lock "12b7c520-b21f-48d1-a1fe-6c12dcb713cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.773s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.958693] env[69796]: DEBUG oslo_vmware.api [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Task: {'id': task-4234358, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142289} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.959737] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 784.959737] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 784.959737] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 784.959949] env[69796]: INFO nova.compute.manager [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Took 1.17 seconds to destroy the instance on the hypervisor. 
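From here on the notable failure in this log is the inventory sync for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3: the compute host builds a DISK_GB inventory with max_unit 0, and placement rejects the update with a 400 because its schema requires max_unit >= 1 (see the "Failed to update inventory" errors below). A minimal sketch, assuming only the python jsonschema package and reusing the schema fragment and value quoted in those errors, that reproduces the validation message:

import jsonschema

# Schema fragment quoted verbatim in the placement 400 responses below: every
# resource class's max_unit must be an integer between 1 and 2147483647.
MAX_UNIT_SCHEMA = {'type': 'integer', 'maximum': 2147483647, 'minimum': 1}

# Value the compute host keeps reporting for DISK_GB max_unit in this log.
disk_gb_max_unit = 0

try:
    jsonschema.validate(disk_gb_max_unit, MAX_UNIT_SCHEMA)
except jsonschema.exceptions.ValidationError as exc:
    # Prints "0 is less than the minimum of 1", the same detail placement
    # returns and Nova wraps in ResourceProviderUpdateFailed /
    # ResourceProviderSyncFailed.
    print(exc.message)

Note that when the report client later refreshes from placement it still gets max_unit 1 for DISK_GB, so the offending 0 originates on the compute side each time the provider tree is rebuilt from the host's view of its storage.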
[ 784.960157] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 784.960652] env[69796]: DEBUG nova.compute.manager [-] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 784.960793] env[69796]: DEBUG nova.network.neutron [-] [instance: 3020e505-513b-4b29-996a-6e70a212f508] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 785.235365] env[69796]: ERROR nova.scheduler.client.report [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [req-9225f508-656c-4189-9e57-f63645ea5cef] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9225f508-656c-4189-9e57-f63645ea5cef"}]} [ 785.235804] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.841s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.236451] env[69796]: ERROR nova.compute.manager [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 785.236451] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Traceback (most recent call last): [ 785.236451] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 785.236451] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] yield [ 785.236451] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 785.236451] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] self.set_inventory_for_provider( [ 785.236451] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 785.236451] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 785.236717] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9225f508-656c-4189-9e57-f63645ea5cef"}]} [ 785.236717] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] [ 785.236717] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] During handling of the above exception, another exception occurred: [ 785.236717] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] [ 785.236717] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Traceback (most recent call last): [ 785.236717] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 785.236717] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] self._delete_instance(context, instance, bdms) [ 785.236717] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 785.236717] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] self._complete_deletion(context, instance) [ 785.237041] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 785.237041] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] self._update_resource_tracker(context, instance) [ 785.237041] env[69796]: ERROR nova.compute.manager [instance: 
7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 785.237041] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] self.rt.update_usage(context, instance, instance.node) [ 785.237041] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 785.237041] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] return f(*args, **kwargs) [ 785.237041] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 785.237041] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] self._update(context.elevated(), self.compute_nodes[nodename]) [ 785.237041] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 785.237041] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] self._update_to_placement(context, compute_node, startup) [ 785.237041] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 785.237041] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 785.237429] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 785.237429] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] return attempt.get(self._wrap_exception) [ 785.237429] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 785.237429] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 785.237429] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 785.237429] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] raise value [ 785.237429] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 785.237429] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 785.237429] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 785.237429] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] self.reportclient.update_from_provider_tree( [ 785.237429] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 785.237429] 
env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] with catch_all(pd.uuid): [ 785.237429] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 785.237797] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] self.gen.throw(typ, value, traceback) [ 785.237797] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 785.237797] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] raise exception.ResourceProviderSyncFailed() [ 785.237797] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 785.237797] env[69796]: ERROR nova.compute.manager [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] [ 785.243627] env[69796]: DEBUG oslo_concurrency.lockutils [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.134s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.245293] env[69796]: INFO nova.compute.claims [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 785.387760] env[69796]: DEBUG oslo_concurrency.lockutils [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Acquiring lock "a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.387996] env[69796]: DEBUG oslo_concurrency.lockutils [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Lock "a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.443945] env[69796]: DEBUG nova.network.neutron [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.445333] env[69796]: DEBUG nova.compute.manager [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 785.751609] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lock "7f37f6c9-adba-4292-9d47-c455f77e539f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.596s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.952057] env[69796]: INFO nova.compute.manager [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: 4f4449ab-939d-4d96-9cd0-419a121575cb] Took 1.08 seconds to deallocate network for instance. [ 785.979055] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.148459] env[69796]: DEBUG nova.network.neutron [-] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.269880] env[69796]: DEBUG nova.compute.manager [req-8b12f094-bed6-49d4-aa56-9311a6c8a9c2 req-eed14fd9-e8af-452f-8166-6e9d0aa4a9d7 service nova] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Received event network-vif-deleted-55aaf1a4-955a-4984-a881-c49328cd474c {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 786.288592] env[69796]: DEBUG nova.scheduler.client.report [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 786.310103] env[69796]: DEBUG nova.scheduler.client.report [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 786.310274] env[69796]: DEBUG nova.compute.provider_tree [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 786.326102] env[69796]: DEBUG nova.scheduler.client.report [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 786.346824] env[69796]: DEBUG nova.scheduler.client.report [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 786.498809] env[69796]: DEBUG nova.network.neutron [-] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.651758] env[69796]: INFO nova.compute.manager [-] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Took 1.91 seconds to deallocate network for instance. [ 786.693347] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Acquiring lock "834de465-9bef-4f8f-8bf6-9d39bc437f58" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.693526] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Lock "834de465-9bef-4f8f-8bf6-9d39bc437f58" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.831237] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae376ec-bbd7-44c9-b893-1d2f4b000954 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.838242] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ed7c7b-524d-452d-9468-5ac165dd5486 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.870549] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea2af75-c747-43ba-804e-93f2a42704fc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.879968] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3545803d-978a-4c42-9626-68b15e56d5f5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.895619] env[69796]: DEBUG nova.compute.provider_tree [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 787.001490] env[69796]: INFO nova.scheduler.client.report [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Deleted allocations for instance 4f4449ab-939d-4d96-9cd0-419a121575cb [ 787.008742] env[69796]: INFO nova.compute.manager [-] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Took 2.05 seconds to deallocate network for instance. [ 787.317411] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.317411] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.428117] env[69796]: ERROR nova.scheduler.client.report [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [req-7d5a54b4-11e1-4efe-8d81-63ef0eb2cd90] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7d5a54b4-11e1-4efe-8d81-63ef0eb2cd90"}]} [ 787.428117] env[69796]: DEBUG oslo_concurrency.lockutils [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.185s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.428565] env[69796]: ERROR nova.compute.manager [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 787.428565] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Traceback (most recent call last): [ 787.428565] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 787.428565] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] yield [ 787.428565] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 787.428565] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] self.set_inventory_for_provider( [ 787.428565] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 787.428565] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 787.428897] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7d5a54b4-11e1-4efe-8d81-63ef0eb2cd90"}]} [ 787.428897] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] [ 787.428897] env[69796]: ERROR 
nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] During handling of the above exception, another exception occurred: [ 787.428897] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] [ 787.428897] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Traceback (most recent call last): [ 787.428897] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 787.428897] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] with self.rt.instance_claim(context, instance, node, allocs, [ 787.428897] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 787.428897] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] return f(*args, **kwargs) [ 787.429213] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 787.429213] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] self._update(elevated, cn) [ 787.429213] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 787.429213] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] self._update_to_placement(context, compute_node, startup) [ 787.429213] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 787.429213] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 787.429213] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 787.429213] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] return attempt.get(self._wrap_exception) [ 787.429213] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 787.429213] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] six.reraise(self.value[0], self.value[1], self.value[2]) [ 787.429213] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 787.429213] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] raise value [ 787.429213] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 787.429606] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 787.429606] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 787.429606] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] self.reportclient.update_from_provider_tree( [ 787.429606] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 787.429606] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] with catch_all(pd.uuid): [ 787.429606] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 787.429606] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] self.gen.throw(typ, value, traceback) [ 787.429606] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 787.429606] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] raise exception.ResourceProviderSyncFailed() [ 787.429606] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 787.429606] env[69796]: ERROR nova.compute.manager [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] [ 787.429943] env[69796]: DEBUG nova.compute.utils [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 787.435276] env[69796]: DEBUG oslo_concurrency.lockutils [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 16.893s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.435276] env[69796]: DEBUG nova.objects.instance [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69796) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 787.441406] env[69796]: DEBUG nova.compute.manager [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Build of instance 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 787.441406] env[69796]: DEBUG nova.compute.manager [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 787.441595] env[69796]: DEBUG oslo_concurrency.lockutils [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Acquiring lock "refresh_cache-21bfeb07-c3d5-402d-84ba-2f22aafd5ae6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.441675] env[69796]: DEBUG oslo_concurrency.lockutils [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Acquired lock "refresh_cache-21bfeb07-c3d5-402d-84ba-2f22aafd5ae6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.441830] env[69796]: DEBUG nova.network.neutron [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.514741] env[69796]: DEBUG oslo_concurrency.lockutils [None req-30bcb611-13d2-4aad-8179-25e4e9ed9992 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "4f4449ab-939d-4d96-9cd0-419a121575cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.495s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.521256] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.007046] env[69796]: DEBUG nova.network.neutron [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.021751] env[69796]: DEBUG nova.compute.manager [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 788.109827] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Acquiring lock "95857f3f-1503-44d0-a1b3-d087bde80393" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.110393] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Lock "95857f3f-1503-44d0-a1b3-d087bde80393" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.218264] env[69796]: DEBUG nova.network.neutron [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.458489] env[69796]: DEBUG oslo_concurrency.lockutils [None req-410fab3f-2574-41e1-a25d-7beb0d9af71d tempest-ServersAdmin275Test-730142025 tempest-ServersAdmin275Test-730142025-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.024s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.459664] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.695s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.459896] env[69796]: DEBUG nova.objects.instance [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Lazy-loading 'resources' on Instance uuid 8b103adc-9903-406f-8fd1-e193e00cde11 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 788.560716] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.722942] env[69796]: DEBUG oslo_concurrency.lockutils [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Releasing lock "refresh_cache-21bfeb07-c3d5-402d-84ba-2f22aafd5ae6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.723710] env[69796]: DEBUG nova.compute.manager [None 
req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 788.723710] env[69796]: DEBUG nova.compute.manager [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 788.723710] env[69796]: DEBUG nova.network.neutron [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 788.734076] env[69796]: DEBUG nova.compute.manager [req-6378b77b-4904-498d-925b-aa202ff4f9de req-0e707f5c-c9d5-4792-a24d-58d401dd67d8 service nova] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Received event network-vif-deleted-1e57c340-c4d4-45c7-977f-167e94856c47 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 788.796036] env[69796]: DEBUG nova.network.neutron [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.981737] env[69796]: DEBUG nova.scheduler.client.report [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 789.010400] env[69796]: DEBUG nova.scheduler.client.report [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 789.011351] env[69796]: DEBUG nova.compute.provider_tree [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 789.030986] env[69796]: DEBUG nova.scheduler.client.report [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 789.054690] env[69796]: DEBUG nova.scheduler.client.report [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 789.300873] env[69796]: DEBUG nova.network.neutron [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.431375] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Acquiring lock "d3620cfb-dd10-4276-b65d-b6041e83ac49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.431375] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Lock "d3620cfb-dd10-4276-b65d-b6041e83ac49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.527034] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605d3a07-49b7-48ad-96ad-bf5375bca731 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.537138] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d512f52-0ddb-49a1-a420-149614ab285f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.575730] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28dee459-89ca-4649-9e31-4445e0a682b7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.586529] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f5ae15-2f6f-4e24-82eb-55c5cac19b1f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
789.604043] env[69796]: DEBUG nova.compute.provider_tree [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 789.814533] env[69796]: INFO nova.compute.manager [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] [instance: 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6] Took 1.09 seconds to deallocate network for instance. [ 790.055623] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Acquiring lock "c5b49cf2-4316-43bf-90d3-8e6da14dc5d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.055623] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Lock "c5b49cf2-4316-43bf-90d3-8e6da14dc5d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.139306] env[69796]: ERROR nova.scheduler.client.report [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [req-c858343c-cbad-4758-a435-c107eeadecab] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c858343c-cbad-4758-a435-c107eeadecab"}]} [ 790.143419] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.684s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.144071] env[69796]: ERROR nova.compute.manager [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 790.144071] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Traceback (most recent call last): [ 790.144071] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 790.144071] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] yield [ 790.144071] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 790.144071] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] self.set_inventory_for_provider( [ 790.144071] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 790.144071] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 790.144397] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c858343c-cbad-4758-a435-c107eeadecab"}]} [ 790.144397] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] [ 790.144397] env[69796]: ERROR nova.compute.manager [instance: 
8b103adc-9903-406f-8fd1-e193e00cde11] During handling of the above exception, another exception occurred: [ 790.144397] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] [ 790.144397] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Traceback (most recent call last): [ 790.144397] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 790.144397] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] self._delete_instance(context, instance, bdms) [ 790.144397] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 790.144397] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] self._complete_deletion(context, instance) [ 790.144744] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 790.144744] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] self._update_resource_tracker(context, instance) [ 790.144744] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 790.144744] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] self.rt.update_usage(context, instance, instance.node) [ 790.144744] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 790.144744] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] return f(*args, **kwargs) [ 790.144744] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 790.144744] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] self._update(context.elevated(), self.compute_nodes[nodename]) [ 790.144744] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 790.144744] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] self._update_to_placement(context, compute_node, startup) [ 790.144744] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 790.144744] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 790.145130] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 790.145130] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] return attempt.get(self._wrap_exception) [ 790.145130] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 790.145130] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] six.reraise(self.value[0], self.value[1], self.value[2]) [ 790.145130] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 790.145130] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] raise value [ 790.145130] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 790.145130] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 790.145130] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 790.145130] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] self.reportclient.update_from_provider_tree( [ 790.145130] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 790.145130] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] with catch_all(pd.uuid): [ 790.145130] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 790.145522] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] self.gen.throw(typ, value, traceback) [ 790.145522] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 790.145522] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] raise exception.ResourceProviderSyncFailed() [ 790.145522] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 790.145522] env[69796]: ERROR nova.compute.manager [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] [ 790.147906] env[69796]: DEBUG oslo_concurrency.lockutils [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.667s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.147906] env[69796]: DEBUG oslo_concurrency.lockutils [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.147906] env[69796]: INFO nova.compute.manager [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] [instance: 47f223c0-12b0-4eda-ab42-81fe8b95afac] Successfully reverted task state from None on failure for instance. [ 790.149437] env[69796]: DEBUG oslo_concurrency.lockutils [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.455s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.150847] env[69796]: INFO nova.compute.claims [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 790.161030] env[69796]: ERROR oslo_messaging.rpc.server [None req-900fdd96-de2a-4f34-91e3-1ca364399132 tempest-AttachInterfacesV270Test-221984682 tempest-AttachInterfacesV270Test-221984682-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 790.161030] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 790.161030] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 790.161030] env[69796]: ERROR oslo_messaging.rpc.server yield [ 790.161030] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 790.161030] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 790.161030] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 790.161030] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 790.161030] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f0f733f3-d2b6-4743-b7d9-680ebc1e6b28"}]} [ 790.161030] env[69796]: ERROR oslo_messaging.rpc.server [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 790.161451] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 790.162013] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 
790.162661] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 790.162661] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 790.165368] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 790.166375] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 790.166375] env[69796]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 790.166375] env[69796]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 790.166375] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 790.166375] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 790.166375] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 790.166375] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 790.166375] env[69796]: ERROR oslo_messaging.rpc.server [ 790.669328] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Lock "8b103adc-9903-406f-8fd1-e193e00cde11" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.292s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.869158] env[69796]: INFO nova.scheduler.client.report [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Deleted allocations for instance 21bfeb07-c3d5-402d-84ba-2f22aafd5ae6 [ 791.199233] env[69796]: DEBUG nova.scheduler.client.report [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 791.220415] env[69796]: DEBUG nova.scheduler.client.report [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 791.220602] env[69796]: DEBUG nova.compute.provider_tree [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 791.233601] env[69796]: DEBUG nova.scheduler.client.report [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] 
Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 791.254606] env[69796]: DEBUG nova.scheduler.client.report [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 791.380349] env[69796]: DEBUG oslo_concurrency.lockutils [None req-90293032-4cf6-4e2f-8aa8-d5ef07ddc397 tempest-AttachInterfacesUnderV243Test-1689768257 tempest-AttachInterfacesUnderV243Test-1689768257-project-member] Lock "21bfeb07-c3d5-402d-84ba-2f22aafd5ae6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.306s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.870310] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3bf94bf-28c6-442e-a70f-50de3c92edab {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.882284] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2ee288-5c33-4704-8ee0-0f8fbf135371 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.888475] env[69796]: DEBUG nova.compute.manager [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 791.942773] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150ff517-4a38-4ab1-a565-e5d42713fd32 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.951584] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b62ea59-410e-4731-8226-e1d9a96c99a7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.973776] env[69796]: DEBUG nova.compute.provider_tree [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 792.194441] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.424642] env[69796]: DEBUG oslo_concurrency.lockutils [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.501043] env[69796]: ERROR nova.scheduler.client.report [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [req-f90bc32c-ab58-475e-b4f0-634e03b9586f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f90bc32c-ab58-475e-b4f0-634e03b9586f"}]} [ 792.501043] env[69796]: DEBUG oslo_concurrency.lockutils [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.350s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.501461] env[69796]: ERROR nova.compute.manager [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 792.501461] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Traceback (most recent call last): [ 792.501461] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 792.501461] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] yield [ 792.501461] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 792.501461] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] self.set_inventory_for_provider( [ 792.501461] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 792.501461] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 792.502141] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f90bc32c-ab58-475e-b4f0-634e03b9586f"}]} [ 792.502141] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] [ 792.502141] env[69796]: ERROR 
nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] During handling of the above exception, another exception occurred: [ 792.502141] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] [ 792.502141] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Traceback (most recent call last): [ 792.502141] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 792.502141] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] with self.rt.instance_claim(context, instance, node, allocs, [ 792.502141] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 792.502141] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] return f(*args, **kwargs) [ 792.504397] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 792.504397] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] self._update(elevated, cn) [ 792.504397] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 792.504397] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] self._update_to_placement(context, compute_node, startup) [ 792.504397] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 792.504397] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 792.504397] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 792.504397] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] return attempt.get(self._wrap_exception) [ 792.504397] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 792.504397] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] six.reraise(self.value[0], self.value[1], self.value[2]) [ 792.504397] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 792.504397] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] raise value [ 792.504397] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 792.504821] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 792.504821] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 792.504821] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] self.reportclient.update_from_provider_tree( [ 792.504821] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 792.504821] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] with catch_all(pd.uuid): [ 792.504821] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 792.504821] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] self.gen.throw(typ, value, traceback) [ 792.504821] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 792.504821] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] raise exception.ResourceProviderSyncFailed() [ 792.504821] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 792.504821] env[69796]: ERROR nova.compute.manager [instance: 5747cb37-539e-4532-a627-282f965a7dd5] [ 792.505257] env[69796]: DEBUG nova.compute.utils [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 792.505257] env[69796]: DEBUG oslo_concurrency.lockutils [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.933s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.505257] env[69796]: INFO nova.compute.claims [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 792.511645] env[69796]: DEBUG nova.compute.manager [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Build of instance 5747cb37-539e-4532-a627-282f965a7dd5 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 792.512106] env[69796]: DEBUG nova.compute.manager [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 792.512326] env[69796]: DEBUG oslo_concurrency.lockutils [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Acquiring lock "refresh_cache-5747cb37-539e-4532-a627-282f965a7dd5" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.512471] env[69796]: DEBUG oslo_concurrency.lockutils [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Acquired lock "refresh_cache-5747cb37-539e-4532-a627-282f965a7dd5" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.512626] env[69796]: DEBUG nova.network.neutron [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 793.051357] env[69796]: DEBUG nova.network.neutron [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 793.146096] env[69796]: DEBUG nova.network.neutron [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.554123] env[69796]: DEBUG nova.scheduler.client.report [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 793.581072] env[69796]: DEBUG nova.scheduler.client.report [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 793.581304] env[69796]: DEBUG nova.compute.provider_tree [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 793.608024] env[69796]: DEBUG nova.scheduler.client.report [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 793.640134] env[69796]: DEBUG nova.scheduler.client.report [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 793.651050] env[69796]: DEBUG oslo_concurrency.lockutils [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Releasing lock 
"refresh_cache-5747cb37-539e-4532-a627-282f965a7dd5" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.651300] env[69796]: DEBUG nova.compute.manager [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 793.651466] env[69796]: DEBUG nova.compute.manager [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 793.651642] env[69796]: DEBUG nova.network.neutron [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 793.721778] env[69796]: DEBUG nova.network.neutron [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.045284] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquiring lock "a4a16667-cd00-4850-9389-0bd57c7efd74" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.045942] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "a4a16667-cd00-4850-9389-0bd57c7efd74" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.045942] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquiring lock "a4a16667-cd00-4850-9389-0bd57c7efd74-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.045942] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "a4a16667-cd00-4850-9389-0bd57c7efd74-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.046224] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "a4a16667-cd00-4850-9389-0bd57c7efd74-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.050632] env[69796]: INFO nova.compute.manager [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Terminating instance [ 794.142872] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62bd244a-1c30-40f0-ab98-fd737d3351b0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.154029] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d903f9-efe0-45e2-af79-97460859f6f7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.203595] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7897a75b-8f00-47dc-8cee-ed0a7b6fbd29 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.212127] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ec02fc-b2f8-4f93-90d5-7919f50c6366 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.226396] env[69796]: DEBUG nova.network.neutron [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.228044] env[69796]: DEBUG nova.compute.provider_tree [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 794.561060] env[69796]: DEBUG nova.compute.manager [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Start destroying the instance on the hypervisor. 
{{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 794.561186] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 794.562175] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140dd62e-223d-4aa7-bdf8-201ace8d5ebd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.573921] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 794.573921] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8378d43-4c15-416f-ad5b-74e7abfbda91 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.582978] env[69796]: DEBUG oslo_vmware.api [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Waiting for the task: (returnval){ [ 794.582978] env[69796]: value = "task-4234367" [ 794.582978] env[69796]: _type = "Task" [ 794.582978] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.596971] env[69796]: DEBUG oslo_vmware.api [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234367, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.622209] env[69796]: DEBUG oslo_concurrency.lockutils [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Acquiring lock "6bbe1196-a61f-4260-bddd-64f578acf1dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.622551] env[69796]: DEBUG oslo_concurrency.lockutils [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Lock "6bbe1196-a61f-4260-bddd-64f578acf1dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.730125] env[69796]: INFO nova.compute.manager [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] [instance: 5747cb37-539e-4532-a627-282f965a7dd5] Took 1.08 seconds to deallocate network for instance. 
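Note the flip-flop visible in the entries above: the refresh from placement returns DISK_GB max_unit 1, but the local ProviderTree update then overwrites it with max_unit 0 and the next PUT is rejected again. One way to confirm what placement actually holds for this provider is to read the same /resource_providers/{uuid}/inventories URL named in the errors; a sketch using python-requests, with placeholder endpoint and token environment variables (not values taken from this log), follows:

import os
import requests

PLACEMENT_URL = os.environ["PLACEMENT_URL"]        # placement endpoint from the service catalog (placeholder)
TOKEN = os.environ["OS_TOKEN"]                      # a valid keystone token (placeholder)
PROVIDER = "dc1d576d-f9a3-4db7-b636-fdf2129d2ab3"   # resource provider UUID from the errors above

resp = requests.get(
    f"{PLACEMENT_URL}/resource_providers/{PROVIDER}/inventories",
    headers={
        "X-Auth-Token": TOKEN,
        # Placement is microversioned; any reasonably recent version serves this GET.
        "OpenStack-API-Version": "placement 1.26",
    },
)
resp.raise_for_status()
# Same shape as the dict logged by _refresh_and_get_inventory above.
print(resp.json()["inventories"].get("DISK_GB"))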
[ 794.754121] env[69796]: ERROR nova.scheduler.client.report [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [req-14d6759c-34fe-4d85-83a8-4fab5c1c2d83] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-14d6759c-34fe-4d85-83a8-4fab5c1c2d83"}]} [ 794.754525] env[69796]: DEBUG oslo_concurrency.lockutils [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.252s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.755484] env[69796]: ERROR nova.compute.manager [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 794.755484] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Traceback (most recent call last): [ 794.755484] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 794.755484] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] yield [ 794.755484] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 794.755484] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] self.set_inventory_for_provider( [ 794.755484] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 794.755484] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 794.755786] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-14d6759c-34fe-4d85-83a8-4fab5c1c2d83"}]} [ 794.755786] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] [ 794.755786] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] During handling of the above exception, another exception occurred: [ 794.755786] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] [ 794.755786] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Traceback (most recent call last): [ 794.755786] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 794.755786] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] with self.rt.instance_claim(context, instance, node, allocs, [ 794.755786] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 794.755786] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] return f(*args, **kwargs) [ 794.756139] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 794.756139] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] self._update(elevated, cn) [ 794.756139] env[69796]: ERROR nova.compute.manager [instance: 
274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 794.756139] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] self._update_to_placement(context, compute_node, startup) [ 794.756139] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 794.756139] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 794.756139] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 794.756139] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] return attempt.get(self._wrap_exception) [ 794.756139] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 794.756139] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] six.reraise(self.value[0], self.value[1], self.value[2]) [ 794.756139] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 794.756139] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] raise value [ 794.756139] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 794.756519] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 794.756519] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 794.756519] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] self.reportclient.update_from_provider_tree( [ 794.756519] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 794.756519] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] with catch_all(pd.uuid): [ 794.756519] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 794.756519] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] self.gen.throw(typ, value, traceback) [ 794.756519] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 794.756519] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] raise exception.ResourceProviderSyncFailed() [ 794.756519] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 794.756519] env[69796]: ERROR nova.compute.manager [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] [ 794.756832] env[69796]: DEBUG nova.compute.utils [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 794.758078] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.895s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.758245] env[69796]: DEBUG nova.objects.instance [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lazy-loading 'resources' on Instance uuid 38792225-b054-4c08-b3ec-51d46287b0f9 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 794.765020] env[69796]: DEBUG nova.compute.manager [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Build of instance 274e4d87-ec17-4210-a0fb-e226d29ed0d3 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 794.765020] env[69796]: DEBUG nova.compute.manager [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 794.765020] env[69796]: DEBUG oslo_concurrency.lockutils [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Acquiring lock "refresh_cache-274e4d87-ec17-4210-a0fb-e226d29ed0d3" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.765020] env[69796]: DEBUG oslo_concurrency.lockutils [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Acquired lock "refresh_cache-274e4d87-ec17-4210-a0fb-e226d29ed0d3" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.765315] env[69796]: DEBUG nova.network.neutron [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 795.095152] env[69796]: DEBUG oslo_vmware.api [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': 
task-4234367, 'name': PowerOffVM_Task, 'duration_secs': 0.229036} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.096514] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 795.096953] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 795.097327] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-369ac0b6-f8e7-4dc3-b3e2-9943ffc7304b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.169259] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 795.169259] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Deleting contents of the VM from datastore datastore1 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 795.169259] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Deleting the datastore file [datastore1] a4a16667-cd00-4850-9389-0bd57c7efd74 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 795.169259] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df446dbf-8982-4929-be03-bc5f13740bc7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.178019] env[69796]: DEBUG oslo_vmware.api [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Waiting for the task: (returnval){ [ 795.178019] env[69796]: value = "task-4234369" [ 795.178019] env[69796]: _type = "Task" [ 795.178019] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.185695] env[69796]: DEBUG oslo_vmware.api [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234369, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.283625] env[69796]: DEBUG nova.scheduler.client.report [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 795.312054] env[69796]: DEBUG nova.scheduler.client.report [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 795.312054] env[69796]: DEBUG nova.compute.provider_tree [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 795.335580] env[69796]: DEBUG nova.scheduler.client.report [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 795.361445] env[69796]: DEBUG nova.scheduler.client.report [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 795.536782] env[69796]: DEBUG nova.network.neutron [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 795.689107] env[69796]: DEBUG oslo_vmware.api [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Task: {'id': task-4234369, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152487} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.691950] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 795.692170] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Deleted contents of the VM from datastore datastore1 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 795.692347] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 795.692514] env[69796]: INFO nova.compute.manager [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Took 1.13 seconds to destroy the instance on the hypervisor. [ 795.692760] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 795.694416] env[69796]: DEBUG nova.compute.manager [-] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 795.694511] env[69796]: DEBUG nova.network.neutron [-] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 795.718598] env[69796]: DEBUG nova.network.neutron [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.778759] env[69796]: INFO nova.scheduler.client.report [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Deleted allocations for instance 5747cb37-539e-4532-a627-282f965a7dd5 [ 795.791246] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Acquiring lock "efee7c1e-4d8c-450c-924b-9d7d15095740" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.791681] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Lock "efee7c1e-4d8c-450c-924b-9d7d15095740" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.951965] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95440698-7a85-4e25-8706-8ad69bb1bdd4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.961938] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5481ecb-1b8e-49d5-b8ca-3bc3b52db8bc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.004126] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6377bc5c-80b1-4a2b-8557-7cb5ee0ade7a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.013589] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643912a6-918d-4d1e-a95d-025fcfacee24 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.029655] env[69796]: DEBUG nova.compute.provider_tree [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Updating inventory in ProviderTree for provider 
dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 796.222240] env[69796]: DEBUG oslo_concurrency.lockutils [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Releasing lock "refresh_cache-274e4d87-ec17-4210-a0fb-e226d29ed0d3" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.224581] env[69796]: DEBUG nova.compute.manager [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 796.224581] env[69796]: DEBUG nova.compute.manager [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 796.224581] env[69796]: DEBUG nova.network.neutron [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 796.266023] env[69796]: DEBUG nova.network.neutron [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 796.292495] env[69796]: DEBUG oslo_concurrency.lockutils [None req-54feb9b3-6294-427e-a4f5-f37ceadf154f tempest-DeleteServersAdminTestJSON-1408950653 tempest-DeleteServersAdminTestJSON-1408950653-project-member] Lock "5747cb37-539e-4532-a627-282f965a7dd5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.898s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.552412] env[69796]: ERROR nova.scheduler.client.report [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [req-cab7c74b-e124-448c-8d77-f2b344981dcb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-cab7c74b-e124-448c-8d77-f2b344981dcb"}]} [ 796.552669] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.795s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.553686] env[69796]: ERROR nova.compute.manager [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 796.553686] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Traceback (most recent call last): [ 796.553686] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 796.553686] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] yield [ 796.553686] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 796.553686] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] self.set_inventory_for_provider( [ 796.553686] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 796.553686] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 796.554175] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-cab7c74b-e124-448c-8d77-f2b344981dcb"}]} [ 796.554175] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] [ 796.554175] env[69796]: ERROR nova.compute.manager [instance: 
38792225-b054-4c08-b3ec-51d46287b0f9] During handling of the above exception, another exception occurred: [ 796.554175] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] [ 796.554175] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Traceback (most recent call last): [ 796.554175] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 796.554175] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] self._delete_instance(context, instance, bdms) [ 796.554175] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 796.554175] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] self._complete_deletion(context, instance) [ 796.555465] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 796.555465] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] self._update_resource_tracker(context, instance) [ 796.555465] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 796.555465] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] self.rt.update_usage(context, instance, instance.node) [ 796.555465] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 796.555465] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] return f(*args, **kwargs) [ 796.555465] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 796.555465] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] self._update(context.elevated(), self.compute_nodes[nodename]) [ 796.555465] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 796.555465] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] self._update_to_placement(context, compute_node, startup) [ 796.555465] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 796.555465] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 796.557213] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 796.557213] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] return attempt.get(self._wrap_exception) [ 796.557213] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 796.557213] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] six.reraise(self.value[0], self.value[1], self.value[2]) [ 796.557213] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 796.557213] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] raise value [ 796.557213] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 796.557213] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 796.557213] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 796.557213] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] self.reportclient.update_from_provider_tree( [ 796.557213] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 796.557213] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] with catch_all(pd.uuid): [ 796.557213] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 796.557634] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] self.gen.throw(typ, value, traceback) [ 796.557634] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 796.557634] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] raise exception.ResourceProviderSyncFailed() [ 796.557634] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 796.557634] env[69796]: ERROR nova.compute.manager [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] [ 796.557634] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.762s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.559041] env[69796]: INFO nova.compute.claims [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 796.669431] env[69796]: DEBUG nova.compute.manager [req-8a7199e6-5bdb-4593-94c6-15b89ac1ef5e req-4f65964b-f05f-403d-b8eb-0a6a3011522a service nova] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Received event network-vif-deleted-327722fe-c5c2-466f-92ca-c218e5304f72 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 796.669431] env[69796]: INFO nova.compute.manager [req-8a7199e6-5bdb-4593-94c6-15b89ac1ef5e req-4f65964b-f05f-403d-b8eb-0a6a3011522a service nova] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Neutron deleted interface 327722fe-c5c2-466f-92ca-c218e5304f72; detaching it from the instance and deleting it from the info cache [ 796.669549] env[69796]: DEBUG nova.network.neutron [req-8a7199e6-5bdb-4593-94c6-15b89ac1ef5e req-4f65964b-f05f-403d-b8eb-0a6a3011522a service nova] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.697761] env[69796]: DEBUG nova.network.neutron [-] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.769557] env[69796]: DEBUG nova.network.neutron [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.794329] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Acquiring lock "62354021-035a-48b1-b22c-bd12cadbdacd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.794547] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Lock "62354021-035a-48b1-b22c-bd12cadbdacd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.796359] env[69796]: DEBUG nova.compute.manager [None 
req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 797.063904] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "38792225-b054-4c08-b3ec-51d46287b0f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.601s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.175629] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cebae735-4ba2-436d-97f5-271580a26a65 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.193303] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cadffcd-7967-4661-a0a5-c8e7af5b2f32 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.214495] env[69796]: INFO nova.compute.manager [-] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Took 1.52 seconds to deallocate network for instance. [ 797.245799] env[69796]: DEBUG nova.compute.manager [req-8a7199e6-5bdb-4593-94c6-15b89ac1ef5e req-4f65964b-f05f-403d-b8eb-0a6a3011522a service nova] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Detach interface failed, port_id=327722fe-c5c2-466f-92ca-c218e5304f72, reason: Instance a4a16667-cd00-4850-9389-0bd57c7efd74 could not be found. {{(pid=69796) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 797.273143] env[69796]: INFO nova.compute.manager [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] [instance: 274e4d87-ec17-4210-a0fb-e226d29ed0d3] Took 1.05 seconds to deallocate network for instance. 
[ 797.327723] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.593199] env[69796]: DEBUG nova.scheduler.client.report [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 797.613304] env[69796]: DEBUG nova.scheduler.client.report [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 797.613553] env[69796]: DEBUG nova.compute.provider_tree [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 797.636331] env[69796]: DEBUG nova.scheduler.client.report [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 797.665334] env[69796]: DEBUG nova.scheduler.client.report [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 797.726775] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.814233] env[69796]: DEBUG oslo_concurrency.lockutils [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Acquiring lock "f1f47f34-d16d-4eba-907f-08d707683941" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.814595] env[69796]: DEBUG oslo_concurrency.lockutils [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Lock "f1f47f34-d16d-4eba-907f-08d707683941" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.247705] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cbd8aa-aabf-4e44-8e92-c7c6ba67c995 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.257041] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664345fd-6fb1-416e-bff4-5cbbc43cbd2b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.294057] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5f81ee-1fad-4df6-b773-4e26580726ef {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.302679] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0a28ad-928c-4f74-bace-77400062b5d6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.318793] env[69796]: DEBUG nova.compute.provider_tree [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 798.324046] env[69796]: INFO nova.scheduler.client.report [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Deleted allocations for instance 274e4d87-ec17-4210-a0fb-e226d29ed0d3 [ 798.584823] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.836680] env[69796]: DEBUG oslo_concurrency.lockutils [None req-402a394c-0d5c-4652-a9c2-a1ddc386e37e tempest-ServerMetadataTestJSON-1489874382 tempest-ServerMetadataTestJSON-1489874382-project-member] Lock "274e4d87-ec17-4210-a0fb-e226d29ed0d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.245s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.861432] env[69796]: ERROR nova.scheduler.client.report [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [req-1de56386-a626-4069-a716-201ea55a704f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1de56386-a626-4069-a716-201ea55a704f"}]} [ 798.861432] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.305s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.862087] env[69796]: ERROR nova.compute.manager [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 798.862087] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Traceback (most recent call last): [ 798.862087] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 798.862087] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] yield [ 798.862087] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 798.862087] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] self.set_inventory_for_provider( [ 798.862087] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 798.862087] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 798.862625] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1de56386-a626-4069-a716-201ea55a704f"}]} [ 798.862625] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] [ 798.862625] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] During handling of the above exception, another exception occurred: [ 798.862625] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] [ 798.862625] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Traceback (most recent call last): [ 798.862625] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 798.862625] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] with self.rt.instance_claim(context, instance, node, allocs, [ 798.862625] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 798.862625] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] return f(*args, **kwargs) [ 798.862917] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 798.862917] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] self._update(elevated, cn) [ 798.862917] env[69796]: ERROR nova.compute.manager [instance: 
74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 798.862917] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] self._update_to_placement(context, compute_node, startup) [ 798.862917] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 798.862917] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 798.862917] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 798.862917] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] return attempt.get(self._wrap_exception) [ 798.862917] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 798.862917] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] six.reraise(self.value[0], self.value[1], self.value[2]) [ 798.862917] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 798.862917] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] raise value [ 798.862917] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 798.863797] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 798.863797] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 798.863797] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] self.reportclient.update_from_provider_tree( [ 798.863797] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 798.863797] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] with catch_all(pd.uuid): [ 798.863797] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 798.863797] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] self.gen.throw(typ, value, traceback) [ 798.863797] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 798.863797] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] raise exception.ResourceProviderSyncFailed() [ 798.863797] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 798.863797] env[69796]: ERROR nova.compute.manager [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] [ 798.865038] env[69796]: DEBUG nova.compute.utils [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 798.865634] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.045s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.869538] env[69796]: INFO nova.compute.claims [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 798.874696] env[69796]: DEBUG nova.compute.manager [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Build of instance 74b17bd9-66c4-4a88-b3de-fc5f720f4eca was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 798.877564] env[69796]: DEBUG nova.compute.manager [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 798.877564] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Acquiring lock "refresh_cache-74b17bd9-66c4-4a88-b3de-fc5f720f4eca" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.877564] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Acquired lock "refresh_cache-74b17bd9-66c4-4a88-b3de-fc5f720f4eca" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.877564] env[69796]: DEBUG nova.network.neutron [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 799.343034] env[69796]: DEBUG nova.compute.manager [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 799.426235] env[69796]: DEBUG nova.network.neutron [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 799.677813] env[69796]: DEBUG nova.network.neutron [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.810191] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquiring lock "be94a635-f83f-46a2-957d-bc07e2e8abe6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.810191] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "be94a635-f83f-46a2-957d-bc07e2e8abe6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.842077] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquiring lock "d2187050-87df-4167-b5e4-2a21a31145be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.842077] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "d2187050-87df-4167-b5e4-2a21a31145be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.892505] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.924474] env[69796]: DEBUG nova.scheduler.client.report [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 799.940518] env[69796]: 
DEBUG nova.scheduler.client.report [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 799.940762] env[69796]: DEBUG nova.compute.provider_tree [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 799.954736] env[69796]: DEBUG nova.scheduler.client.report [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 799.982448] env[69796]: DEBUG nova.scheduler.client.report [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 800.180503] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Releasing lock "refresh_cache-74b17bd9-66c4-4a88-b3de-fc5f720f4eca" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.183561] env[69796]: DEBUG nova.compute.manager [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 800.183561] env[69796]: DEBUG nova.compute.manager [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 800.183561] env[69796]: DEBUG nova.network.neutron [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 800.228291] env[69796]: DEBUG nova.network.neutron [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.629536] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88579abc-2e89-4b37-8618-f19a1a40fa40 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.640623] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3791a5-e8b4-418b-b8a8-f9f47f86962d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.680279] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7502762-6858-4bfa-8c65-27fb21ee3b88 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.687391] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b150b023-1d37-44e6-b438-57aaf25bfca7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.709021] env[69796]: DEBUG nova.compute.provider_tree [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 800.731590] env[69796]: DEBUG nova.network.neutron [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.237859] env[69796]: ERROR nova.scheduler.client.report [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 
tempest-ServerShowV247Test-420476759-project-member] [req-d30220d2-cd6b-4503-a9cd-dda953e18208] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-d30220d2-cd6b-4503-a9cd-dda953e18208"}]} [ 801.238524] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.240113] env[69796]: ERROR nova.compute.manager [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
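The 400 body in the record above is a plain JSON Schema rejection: the host submitted DISK_GB max_unit = 0, and the schema quoted in the error requires an integer between 1 and 2147483647. The following sketch reproduces just that check with the jsonschema library; the max_unit constraint and the DISK_GB values are taken from the log text, while the enclosing schema layout is an assumption for illustration.

import jsonschema

MAX_UNIT_SCHEMA = {"type": "integer", "maximum": 2147483647, "minimum": 1}

INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {"max_unit": MAX_UNIT_SCHEMA},
                },
            },
        },
    },
}

# The DISK_GB entry the compute host tried to submit, per the record above.
payload = {
    "inventories": {
        "DISK_GB": {
            "total": 400, "reserved": 0, "min_unit": 1,
            "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0,
        },
    },
}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints "0 is less than the minimum of 1", the same complaint placement returns.
    print(exc.message)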
[ 801.240113] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] Traceback (most recent call last): [ 801.240113] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 801.240113] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] yield [ 801.240113] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 801.240113] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] self.set_inventory_for_provider( [ 801.240113] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 801.240113] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 801.240401] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-d30220d2-cd6b-4503-a9cd-dda953e18208"}]} [ 801.240401] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] [ 801.240401] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] During handling of the above exception, another exception occurred: [ 801.240401] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] [ 801.240401] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] Traceback (most recent call last): [ 801.240401] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 801.240401] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] with self.rt.instance_claim(context, instance, node, allocs, [ 801.240401] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 801.240401] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] return f(*args, **kwargs) [ 801.240970] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 801.240970] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] self._update(elevated, cn) [ 801.240970] env[69796]: ERROR nova.compute.manager [instance: 
2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 801.240970] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] self._update_to_placement(context, compute_node, startup) [ 801.240970] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 801.240970] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 801.240970] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 801.240970] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] return attempt.get(self._wrap_exception) [ 801.240970] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 801.240970] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] six.reraise(self.value[0], self.value[1], self.value[2]) [ 801.240970] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 801.240970] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] raise value [ 801.240970] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 801.241394] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 801.241394] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 801.241394] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] self.reportclient.update_from_provider_tree( [ 801.241394] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 801.241394] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] with catch_all(pd.uuid): [ 801.241394] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 801.241394] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] self.gen.throw(typ, value, traceback) [ 801.241394] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 801.241394] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] raise exception.ResourceProviderSyncFailed() [ 801.241394] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 801.241394] env[69796]: ERROR nova.compute.manager [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] [ 801.241805] env[69796]: DEBUG nova.compute.utils [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 801.244701] env[69796]: INFO nova.compute.manager [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: 74b17bd9-66c4-4a88-b3de-fc5f720f4eca] Took 1.06 seconds to deallocate network for instance. [ 801.248721] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.399s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.249043] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.249320] env[69796]: INFO nova.compute.manager [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] [instance: 47005af8-11fe-498f-9b67-e0316faeeb8f] Successfully reverted task state from None on failure for instance. [ 801.254630] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.200s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.257787] env[69796]: INFO nova.compute.claims [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 801.265081] env[69796]: DEBUG nova.compute.manager [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] Build of instance 2218ece0-5246-451d-9bdc-8fd01cfe6ec3 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 801.265836] env[69796]: DEBUG nova.compute.manager [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 801.266184] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Acquiring lock "refresh_cache-2218ece0-5246-451d-9bdc-8fd01cfe6ec3" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.266425] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Acquired lock "refresh_cache-2218ece0-5246-451d-9bdc-8fd01cfe6ec3" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.267208] env[69796]: DEBUG nova.network.neutron [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 801.268648] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.269942] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.282665] env[69796]: ERROR oslo_messaging.rpc.server [None req-6ceb92f1-3ab7-4b1e-8f31-461a4c68421b tempest-ServerExternalEventsTest-763372503 tempest-ServerExternalEventsTest-763372503-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 801.282665] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 801.282665] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 801.282665] env[69796]: ERROR oslo_messaging.rpc.server yield [ 801.282665] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 801.282665] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 801.282665] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 801.282665] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 801.282665] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-578fb2e7-0ae8-4b9d-b248-6f3ba564acca"}]} [ 801.282665] env[69796]: ERROR oslo_messaging.rpc.server [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 801.283062] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 801.283833] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 
801.284345] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 801.284345] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 801.284684] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 801.285041] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 801.285041] env[69796]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 801.285041] env[69796]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 801.285041] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 801.285041] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 801.285041] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 801.285041] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 801.285041] env[69796]: ERROR oslo_messaging.rpc.server [ 801.784635] env[69796]: DEBUG oslo_concurrency.lockutils [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Acquiring lock "9e84f3cf-fae6-474c-b86b-7cd67d986d46" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.787335] env[69796]: DEBUG oslo_concurrency.lockutils [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Lock "9e84f3cf-fae6-474c-b86b-7cd67d986d46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.790336] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.790336] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.790734] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.790734] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.790786] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.791185] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.791705] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] CONF.reclaim_instance_interval <= 0, 
skipping... {{(pid=69796) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 801.791705] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager.update_available_resource {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.821017] env[69796]: DEBUG nova.scheduler.client.report [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 801.838317] env[69796]: DEBUG nova.scheduler.client.report [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 801.838550] env[69796]: DEBUG nova.compute.provider_tree [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 801.854818] env[69796]: DEBUG nova.scheduler.client.report [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 801.882616] env[69796]: DEBUG nova.scheduler.client.report [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 801.964189] env[69796]: DEBUG nova.network.neutron [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 
tempest-ServerShowV247Test-420476759-project-member] [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.119126] env[69796]: DEBUG nova.network.neutron [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.296577] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.316325] env[69796]: INFO nova.scheduler.client.report [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Deleted allocations for instance 74b17bd9-66c4-4a88-b3de-fc5f720f4eca [ 802.517680] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97f36ba-0650-41b1-8a33-25160e19fe05 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.532137] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e83da5-4e9a-4ea4-a237-c5e4e3f1db69 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.572277] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd1d156-44ac-41f8-9d9a-95a9b7b19e3b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.583702] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce01ffd0-53eb-498d-8bce-a79b9f9f6492 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.601387] env[69796]: DEBUG nova.compute.provider_tree [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 802.625522] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Releasing lock "refresh_cache-2218ece0-5246-451d-9bdc-8fd01cfe6ec3" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.626281] env[69796]: DEBUG nova.compute.manager [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 
tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 802.626561] env[69796]: DEBUG nova.compute.manager [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: 2218ece0-5246-451d-9bdc-8fd01cfe6ec3] Skipping network deallocation for instance since networking was not requested. {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 802.828186] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ee3e22d2-dcfa-476c-84d6-457d83f4ee55 tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Lock "74b17bd9-66c4-4a88-b3de-fc5f720f4eca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.914s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.140210] env[69796]: ERROR nova.scheduler.client.report [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [req-0a9ea84a-1203-49e6-acf3-54db0c683c30] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-0a9ea84a-1203-49e6-acf3-54db0c683c30"}]} [ 803.141159] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.886s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.143081] env[69796]: ERROR nova.compute.manager [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
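The records above show the same loop each time: the refresh pulls an inventory with DISK_GB max_unit 1 back from placement, the next claim submits max_unit 0, and placement rejects it with the 400 just quoted. One way to see both sides is to read the provider's inventory directly from the placement API; the endpoint, token, and microversion below are assumptions, while the URL path and provider UUID come from the error record.

import requests

PLACEMENT = "http://placement.example/placement"   # assumed endpoint
TOKEN = "gAAAA-example-token"                       # assumed keystone token
RP_UUID = "dc1d576d-f9a3-4db7-b636-fdf2129d2ab3"    # from the log

headers = {
    "X-Auth-Token": TOKEN,
    "OpenStack-API-Version": "placement 1.39",      # assumed microversion
}

resp = requests.get(
    PLACEMENT + "/resource_providers/" + RP_UUID + "/inventories",
    headers=headers)
resp.raise_for_status()
disk = resp.json()["inventories"]["DISK_GB"]
# In the log, placement still holds max_unit 1 while the host keeps submitting 0.
print("DISK_GB max_unit held by placement:", disk["max_unit"])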
[ 803.143081] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Traceback (most recent call last): [ 803.143081] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 803.143081] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] yield [ 803.143081] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 803.143081] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] self.set_inventory_for_provider( [ 803.143081] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 803.143081] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 803.143297] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-0a9ea84a-1203-49e6-acf3-54db0c683c30"}]} [ 803.143297] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] [ 803.143297] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] During handling of the above exception, another exception occurred: [ 803.143297] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] [ 803.143297] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Traceback (most recent call last): [ 803.143297] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 803.143297] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] with self.rt.instance_claim(context, instance, node, allocs, [ 803.143297] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 803.143297] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] return f(*args, **kwargs) [ 803.143521] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 803.143521] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] self._update(elevated, cn) [ 803.143521] env[69796]: ERROR nova.compute.manager [instance: 
659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 803.143521] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] self._update_to_placement(context, compute_node, startup) [ 803.143521] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 803.143521] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 803.143521] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 803.143521] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] return attempt.get(self._wrap_exception) [ 803.143521] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 803.143521] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] six.reraise(self.value[0], self.value[1], self.value[2]) [ 803.143521] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 803.143521] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] raise value [ 803.143521] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 803.143812] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 803.143812] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 803.143812] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] self.reportclient.update_from_provider_tree( [ 803.143812] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 803.143812] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] with catch_all(pd.uuid): [ 803.143812] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 803.143812] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] self.gen.throw(typ, value, traceback) [ 803.143812] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 803.143812] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] raise exception.ResourceProviderSyncFailed() [ 803.143812] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
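The oslo_messaging.rpc.server traceback earlier in this section (the terminate_instance path at 801.282) passes through several decorator layers, each re-raising via oslo_utils.excutils.save_and_reraise_exception(), which is why the same ResourceProviderSyncFailed reaches the RPC layer unchanged. A minimal sketch of that pattern follows, with the real failure replaced by a stand-in exception.

from oslo_utils import excutils

def decorated_function():
    try:
        raise RuntimeError("placement sync failed")   # stand-in for the real failure
    except Exception:
        with excutils.save_and_reraise_exception():
            # Cleanup or notification happens here; the original exception is
            # re-raised automatically when the context manager exits.
            print("reverting task state / emitting notification")

try:
    decorated_function()
except RuntimeError as exc:
    print("the RPC caller still sees:", exc)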
[ 803.143812] env[69796]: ERROR nova.compute.manager [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] [ 803.144633] env[69796]: DEBUG nova.compute.utils [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 803.144633] env[69796]: DEBUG oslo_concurrency.lockutils [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.478s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.146051] env[69796]: INFO nova.compute.claims [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 803.150545] env[69796]: DEBUG nova.compute.manager [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Build of instance 659ec1bd-2be6-4f40-b513-a907b77f2ebb was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 803.151132] env[69796]: DEBUG nova.compute.manager [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 803.151404] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Acquiring lock "refresh_cache-659ec1bd-2be6-4f40-b513-a907b77f2ebb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.151585] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Acquired lock "refresh_cache-659ec1bd-2be6-4f40-b513-a907b77f2ebb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.151750] env[69796]: DEBUG nova.network.neutron [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 803.334144] env[69796]: DEBUG nova.compute.manager [None 
req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 803.672273] env[69796]: INFO nova.scheduler.client.report [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Deleted allocations for instance 2218ece0-5246-451d-9bdc-8fd01cfe6ec3 [ 803.698412] env[69796]: DEBUG nova.network.neutron [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.792166] env[69796]: DEBUG nova.network.neutron [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.862902] env[69796]: DEBUG oslo_concurrency.lockutils [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.184173] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f41f99b0-91c6-4427-9e81-18703f5beb68 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Lock "2218ece0-5246-451d-9bdc-8fd01cfe6ec3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.061s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.189617] env[69796]: DEBUG nova.scheduler.client.report [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 804.208222] env[69796]: DEBUG nova.scheduler.client.report [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 804.208451] env[69796]: DEBUG nova.compute.provider_tree [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 
tempest-ServerShowV247Test-420476759-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 804.222575] env[69796]: DEBUG nova.scheduler.client.report [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 804.222789] env[69796]: DEBUG nova.compute.provider_tree [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Updating resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 generation from 52 to 53 during operation: update_aggregates {{(pid=69796) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 804.242441] env[69796]: DEBUG nova.scheduler.client.report [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 804.296711] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Releasing lock "refresh_cache-659ec1bd-2be6-4f40-b513-a907b77f2ebb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.297655] env[69796]: DEBUG nova.compute.manager [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 804.299418] env[69796]: DEBUG nova.compute.manager [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 804.299611] env[69796]: DEBUG nova.network.neutron [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 804.322297] env[69796]: DEBUG nova.network.neutron [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 804.657436] env[69796]: DEBUG oslo_concurrency.lockutils [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Acquiring lock "a754473f-2fb1-4018-9b61-9983bff07bd5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.657436] env[69796]: DEBUG oslo_concurrency.lockutils [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Lock "a754473f-2fb1-4018-9b61-9983bff07bd5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.697995] env[69796]: DEBUG nova.compute.manager [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 804.780410] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68ee5690-3808-4857-aa12-d6166eaf5dc3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.791941] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92583aa0-757c-44ab-b02a-8bd171653119 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.824282] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-debc9778-6450-44fb-9b77-3d82cbb43f3a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.828420] env[69796]: DEBUG nova.network.neutron [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.835540] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aff7a938-1f9f-4303-a2f3-d34ba777bffd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.851239] env[69796]: DEBUG nova.compute.provider_tree [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 805.226134] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.334019] env[69796]: INFO nova.compute.manager [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] [instance: 659ec1bd-2be6-4f40-b513-a907b77f2ebb] Took 1.03 seconds to deallocate network for instance. 
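Note the divergence between the two inventory views logged above: the copy refreshed from placement reports DISK_GB with 'max_unit': 1, while the inventory the compute host is about to push carries 'max_unit': 0. Placement's schema, quoted in the error that follows, requires max_unit to be at least 1, so that update cannot succeed. Purely as an illustration, and not Nova's code or its actual fix, a compute-side reporter could clamp the value before building the payload:

```python
# Illustrative guard only, not Nova's code or its actual fix: placement's
# inventory schema requires max_unit >= 1, so a compute-side reporter could
# clamp the value before building the payload the log shows being pushed.
def sane_max_unit(reported: int, total: int) -> int:
    # Keep max_unit within [1, total] so the placement schema accepts it.
    return max(1, min(int(reported), int(total)))


disk_gb = {
    "total": 400, "reserved": 0, "min_unit": 1,
    "max_unit": sane_max_unit(0, 400),  # 1 instead of the rejected 0
    "step_size": 1, "allocation_ratio": 1.0,
}
print(disk_gb["max_unit"])
```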
[ 805.383444] env[69796]: ERROR nova.scheduler.client.report [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [req-96784e44-7173-4913-bfb6-f0c59fbae383] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-96784e44-7173-4913-bfb6-f0c59fbae383"}]} [ 805.383834] env[69796]: DEBUG oslo_concurrency.lockutils [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.384431] env[69796]: ERROR nova.compute.manager [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
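The 400 body spells out the schema violation: each inventory's max_unit must be an integer between 1 and 2147483647, and the request supplied 0 for DISK_GB. The failure can be reproduced locally with the jsonschema library using the schema fragment quoted in the response; everything outside that fragment is an illustrative assumption, not placement's actual validation code.

```python
# Local reproduction of the validation failure in the 400 response above.
# The max_unit schema fragment is copied from the error detail; the rest is
# an illustrative assumption, not placement's actual validation code.
import jsonschema

MAX_UNIT_SCHEMA = {"type": "integer", "maximum": 2147483647, "minimum": 1}

disk_gb_inventory = {
    "total": 400, "reserved": 0, "min_unit": 1,
    "max_unit": 0,  # the value the compute host reported
    "step_size": 1, "allocation_ratio": 1.0,
}

try:
    jsonschema.validate(disk_gb_inventory["max_unit"], MAX_UNIT_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints "0 is less than the minimum of 1", matching the response detail.
    print(exc.message)
```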
[ 805.384431] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] Traceback (most recent call last): [ 805.384431] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 805.384431] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] yield [ 805.384431] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 805.384431] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] self.set_inventory_for_provider( [ 805.384431] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 805.384431] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 805.384752] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-96784e44-7173-4913-bfb6-f0c59fbae383"}]} [ 805.384752] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] [ 805.384752] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] During handling of the above exception, another exception occurred: [ 805.384752] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] [ 805.384752] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] Traceback (most recent call last): [ 805.384752] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 805.384752] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] with self.rt.instance_claim(context, instance, node, allocs, [ 805.384752] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 805.384752] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] return f(*args, **kwargs) [ 805.385084] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 805.385084] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] self._update(elevated, cn) [ 805.385084] env[69796]: ERROR nova.compute.manager [instance: 
b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 805.385084] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] self._update_to_placement(context, compute_node, startup) [ 805.385084] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 805.385084] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 805.385084] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 805.385084] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] return attempt.get(self._wrap_exception) [ 805.385084] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 805.385084] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] six.reraise(self.value[0], self.value[1], self.value[2]) [ 805.385084] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 805.385084] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] raise value [ 805.385084] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 805.385363] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 805.385363] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 805.385363] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] self.reportclient.update_from_provider_tree( [ 805.385363] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 805.385363] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] with catch_all(pd.uuid): [ 805.385363] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 805.385363] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] self.gen.throw(typ, value, traceback) [ 805.385363] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 805.385363] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] raise exception.ResourceProviderSyncFailed() [ 805.385363] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 805.385363] env[69796]: ERROR nova.compute.manager [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] [ 805.385600] env[69796]: DEBUG nova.compute.utils [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 805.386349] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.353s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.387864] env[69796]: INFO nova.compute.claims [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 805.393215] env[69796]: DEBUG nova.compute.manager [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] Build of instance b97945f4-5c5b-4a98-adac-8337a7ed9011 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 805.393215] env[69796]: DEBUG nova.compute.manager [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 805.393215] env[69796]: DEBUG oslo_concurrency.lockutils [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Acquiring lock "refresh_cache-b97945f4-5c5b-4a98-adac-8337a7ed9011" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.393215] env[69796]: DEBUG oslo_concurrency.lockutils [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Acquired lock "refresh_cache-b97945f4-5c5b-4a98-adac-8337a7ed9011" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.393215] env[69796]: DEBUG nova.network.neutron [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.925454] env[69796]: DEBUG nova.network.neutron [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.998772] env[69796]: DEBUG nova.network.neutron [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.360708] env[69796]: INFO nova.scheduler.client.report [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Deleted allocations for instance 659ec1bd-2be6-4f40-b513-a907b77f2ebb [ 806.418751] env[69796]: DEBUG nova.scheduler.client.report [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 806.434537] env[69796]: DEBUG nova.scheduler.client.report [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 806.434768] env[69796]: DEBUG nova.compute.provider_tree [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 806.447309] env[69796]: DEBUG nova.scheduler.client.report [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 806.464129] env[69796]: DEBUG nova.scheduler.client.report [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 806.501235] env[69796]: DEBUG oslo_concurrency.lockutils [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Releasing lock "refresh_cache-b97945f4-5c5b-4a98-adac-8337a7ed9011" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.501703] env[69796]: DEBUG nova.compute.manager [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 806.501703] env[69796]: DEBUG nova.compute.manager [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] [instance: b97945f4-5c5b-4a98-adac-8337a7ed9011] Skipping network deallocation for instance since networking was not requested. {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 806.844294] env[69796]: DEBUG oslo_concurrency.lockutils [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Acquiring lock "e7c0df98-424a-45c4-9bb6-1daf148dcb04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.844294] env[69796]: DEBUG oslo_concurrency.lockutils [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Lock "e7c0df98-424a-45c4-9bb6-1daf148dcb04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.876809] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5322f199-0bd2-474b-ae5a-a503245ed8cd tempest-FloatingIPsAssociationNegativeTestJSON-1096689969 tempest-FloatingIPsAssociationNegativeTestJSON-1096689969-project-member] Lock "659ec1bd-2be6-4f40-b513-a907b77f2ebb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.263s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.962951] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f0d9aa-ceb3-4ef3-8d13-257515c8cba6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.972936] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd67347a-43cd-4219-b43f-6f1b2de96e5b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.005016] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25daa35a-c534-433d-a1a7-c4a321755c06 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.016253] env[69796]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90637564-29d5-4a0e-afb6-628ca764e94b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.032051] env[69796]: DEBUG nova.compute.provider_tree [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 807.379500] env[69796]: DEBUG nova.compute.manager [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 807.537255] env[69796]: INFO nova.scheduler.client.report [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Deleted allocations for instance b97945f4-5c5b-4a98-adac-8337a7ed9011 [ 807.569442] env[69796]: ERROR nova.scheduler.client.report [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [req-ea213e55-da98-4fee-9354-a832b512dddf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-ea213e55-da98-4fee-9354-a832b512dddf"}]} [ 807.570321] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.184s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 807.570500] env[69796]: ERROR nova.compute.manager [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 807.570500] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Traceback (most recent call last): [ 807.570500] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 807.570500] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] yield [ 807.570500] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 807.570500] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] self.set_inventory_for_provider( [ 807.570500] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 807.570500] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 807.570740] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-ea213e55-da98-4fee-9354-a832b512dddf"}]} [ 807.570740] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] [ 807.570740] env[69796]: ERROR nova.compute.manager 
[instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] During handling of the above exception, another exception occurred: [ 807.570740] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] [ 807.570740] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Traceback (most recent call last): [ 807.570740] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 807.570740] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] with self.rt.instance_claim(context, instance, node, allocs, [ 807.570740] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 807.570740] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] return f(*args, **kwargs) [ 807.571072] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 807.571072] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] self._update(elevated, cn) [ 807.571072] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 807.571072] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] self._update_to_placement(context, compute_node, startup) [ 807.571072] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 807.571072] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 807.571072] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 807.571072] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] return attempt.get(self._wrap_exception) [ 807.571072] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 807.571072] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] six.reraise(self.value[0], self.value[1], self.value[2]) [ 807.571072] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 807.571072] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] raise value [ 807.571072] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 807.571378] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 807.571378] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 807.571378] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] self.reportclient.update_from_provider_tree( [ 807.571378] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 807.571378] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] with catch_all(pd.uuid): [ 807.571378] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 807.571378] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] self.gen.throw(typ, value, traceback) [ 807.571378] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 807.571378] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] raise exception.ResourceProviderSyncFailed() [ 807.571378] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 807.571378] env[69796]: ERROR nova.compute.manager [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] [ 807.572043] env[69796]: DEBUG nova.compute.utils [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 807.572923] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.595s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.575782] env[69796]: INFO nova.compute.claims [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 807.581109] env[69796]: DEBUG nova.compute.manager [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Build of instance 46c6028e-1282-4585-bc96-58e0c036b5ef was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 807.581109] env[69796]: DEBUG nova.compute.manager [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 807.581109] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Acquiring lock "refresh_cache-46c6028e-1282-4585-bc96-58e0c036b5ef" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.581109] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Acquired lock "refresh_cache-46c6028e-1282-4585-bc96-58e0c036b5ef" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.581285] env[69796]: DEBUG nova.network.neutron [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 807.904441] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.046432] env[69796]: DEBUG oslo_concurrency.lockutils [None req-1eb0a0fc-fac5-4260-9ac4-2364e1dc4ce2 tempest-ServerShowV247Test-420476759 tempest-ServerShowV247Test-420476759-project-member] Lock "b97945f4-5c5b-4a98-adac-8337a7ed9011" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.239s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.113169] env[69796]: DEBUG nova.network.neutron [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 808.194507] env[69796]: DEBUG nova.network.neutron [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.550333] env[69796]: DEBUG nova.compute.manager [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 808.612995] env[69796]: DEBUG nova.scheduler.client.report [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 808.643225] env[69796]: DEBUG nova.scheduler.client.report [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 808.643225] env[69796]: DEBUG nova.compute.provider_tree [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 808.655076] env[69796]: DEBUG nova.scheduler.client.report [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 808.677288] env[69796]: DEBUG nova.scheduler.client.report [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 808.699076] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Releasing lock "refresh_cache-46c6028e-1282-4585-bc96-58e0c036b5ef" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.699225] env[69796]: DEBUG nova.compute.manager [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Virt driver does not provide unplug_vifs method, 
so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 808.699567] env[69796]: DEBUG nova.compute.manager [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 808.699898] env[69796]: DEBUG nova.network.neutron [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.735628] env[69796]: DEBUG nova.network.neutron [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 809.082973] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.136742] env[69796]: DEBUG oslo_concurrency.lockutils [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Acquiring lock "f5fb74e8-1197-4314-8fa4-2d0a3d231ad4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.139151] env[69796]: DEBUG oslo_concurrency.lockutils [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Lock "f5fb74e8-1197-4314-8fa4-2d0a3d231ad4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.223572] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d51bc5-f649-4eae-93aa-38b0fb8c3f6e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.235128] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1365e116-6da4-44c2-93a4-fbd27ec885c1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.241152] env[69796]: DEBUG nova.network.neutron [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
809.269924] env[69796]: INFO nova.compute.manager [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 46c6028e-1282-4585-bc96-58e0c036b5ef] Took 0.57 seconds to deallocate network for instance. [ 809.273101] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b718e26c-5587-416a-aad3-75d5d86a026c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.282705] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc74e106-1dd7-40d4-94af-be9c840b9fb1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.297948] env[69796]: DEBUG nova.compute.provider_tree [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 809.838817] env[69796]: ERROR nova.scheduler.client.report [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [req-9bd452ed-54b6-4705-9411-242ea744a074] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9bd452ed-54b6-4705-9411-242ea744a074"}]} [ 809.839808] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.840293] env[69796]: ERROR nova.compute.manager [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 809.840293] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] Traceback (most recent call last): [ 809.840293] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 809.840293] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] yield [ 809.840293] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 809.840293] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] self.set_inventory_for_provider( [ 809.840293] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 809.840293] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 809.842660] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9bd452ed-54b6-4705-9411-242ea744a074"}]} [ 809.842660] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] [ 809.842660] env[69796]: ERROR nova.compute.manager [instance: 
10c782a6-6507-482e-8671-2278375a68fc] During handling of the above exception, another exception occurred: [ 809.842660] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] [ 809.842660] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] Traceback (most recent call last): [ 809.842660] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 809.842660] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] with self.rt.instance_claim(context, instance, node, allocs, [ 809.842660] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 809.842660] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] return f(*args, **kwargs) [ 809.843066] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 809.843066] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] self._update(elevated, cn) [ 809.843066] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 809.843066] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] self._update_to_placement(context, compute_node, startup) [ 809.843066] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 809.843066] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 809.843066] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 809.843066] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] return attempt.get(self._wrap_exception) [ 809.843066] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 809.843066] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] six.reraise(self.value[0], self.value[1], self.value[2]) [ 809.843066] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 809.843066] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] raise value [ 809.843066] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 809.843375] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 809.843375] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 809.843375] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] self.reportclient.update_from_provider_tree( [ 809.843375] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 809.843375] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] with catch_all(pd.uuid): [ 809.843375] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 809.843375] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] self.gen.throw(typ, value, traceback) [ 809.843375] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 809.843375] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] raise exception.ResourceProviderSyncFailed() [ 809.843375] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 809.843375] env[69796]: ERROR nova.compute.manager [instance: 10c782a6-6507-482e-8671-2278375a68fc] [ 809.843664] env[69796]: DEBUG nova.compute.utils [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 809.843664] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.679s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.843664] env[69796]: DEBUG nova.objects.instance [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lazy-loading 'resources' on Instance uuid f0d4f167-344a-4828-9f6e-8a62ed8e064d {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 809.847306] env[69796]: DEBUG nova.compute.manager [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] Build of instance 10c782a6-6507-482e-8671-2278375a68fc was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 809.847750] env[69796]: DEBUG nova.compute.manager [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 809.847996] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Acquiring lock "refresh_cache-10c782a6-6507-482e-8671-2278375a68fc" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.850811] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Acquired lock "refresh_cache-10c782a6-6507-482e-8671-2278375a68fc" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.850811] env[69796]: DEBUG nova.network.neutron [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 809.994893] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.995387] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.305131] env[69796]: INFO nova.scheduler.client.report [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Deleted allocations for instance 46c6028e-1282-4585-bc96-58e0c036b5ef [ 810.374716] env[69796]: DEBUG nova.scheduler.client.report [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 810.378887] env[69796]: DEBUG nova.network.neutron [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 810.400924] env[69796]: DEBUG nova.scheduler.client.report [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 810.401135] env[69796]: DEBUG nova.compute.provider_tree [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 810.420966] env[69796]: DEBUG nova.scheduler.client.report [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 810.441318] env[69796]: DEBUG nova.scheduler.client.report [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 810.455588] env[69796]: DEBUG nova.network.neutron [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.499335] env[69796]: DEBUG nova.compute.utils [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 810.813913] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6813223-7dc7-4975-a524-208956dd4f60 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Lock "46c6028e-1282-4585-bc96-58e0c036b5ef" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.959s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.845448] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc5f1fa-d100-462a-9fa1-cdf955d40a3a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.854029] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9272b2c9-375c-40a1-8cfa-6d88375207ec {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.887498] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42b1e76-1d8a-4b89-916b-de2eda143beb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.896432] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05369f57-d9b2-4100-a587-c9807f4330d6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.913592] env[69796]: DEBUG nova.compute.provider_tree [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 810.958783] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Releasing lock "refresh_cache-10c782a6-6507-482e-8671-2278375a68fc" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.959131] env[69796]: DEBUG nova.compute.manager [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 810.959339] env[69796]: DEBUG nova.compute.manager [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 810.959511] env[69796]: DEBUG nova.network.neutron [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 810.976285] env[69796]: DEBUG nova.network.neutron [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 811.002743] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.317151] env[69796]: DEBUG nova.compute.manager [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 811.439378] env[69796]: ERROR nova.scheduler.client.report [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [req-99ed3f50-8edc-4970-8489-82efa8356569] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-99ed3f50-8edc-4970-8489-82efa8356569"}]} [ 811.439779] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.440409] env[69796]: ERROR nova.compute.manager [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 811.440409] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Traceback (most recent call last): [ 811.440409] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 811.440409] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] yield [ 811.440409] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 811.440409] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] self.set_inventory_for_provider( [ 811.440409] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 811.440409] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 811.440599] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-99ed3f50-8edc-4970-8489-82efa8356569"}]} [ 811.440599] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] [ 811.440599] env[69796]: ERROR nova.compute.manager [instance: 
f0d4f167-344a-4828-9f6e-8a62ed8e064d] During handling of the above exception, another exception occurred: [ 811.440599] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] [ 811.440599] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Traceback (most recent call last): [ 811.440599] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 811.440599] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] self._delete_instance(context, instance, bdms) [ 811.440599] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 811.440599] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] self._complete_deletion(context, instance) [ 811.440822] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 811.440822] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] self._update_resource_tracker(context, instance) [ 811.440822] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 811.440822] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] self.rt.update_usage(context, instance, instance.node) [ 811.440822] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 811.440822] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] return f(*args, **kwargs) [ 811.440822] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 811.440822] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] self._update(context.elevated(), self.compute_nodes[nodename]) [ 811.440822] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 811.440822] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] self._update_to_placement(context, compute_node, startup) [ 811.440822] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 811.440822] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 811.441143] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 811.441143] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] return attempt.get(self._wrap_exception) [ 811.441143] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 811.441143] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] six.reraise(self.value[0], self.value[1], self.value[2]) [ 811.441143] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 811.441143] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] raise value [ 811.441143] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 811.441143] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 811.441143] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 811.441143] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] self.reportclient.update_from_provider_tree( [ 811.441143] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 811.441143] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] with catch_all(pd.uuid): [ 811.441143] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 811.441419] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] self.gen.throw(typ, value, traceback) [ 811.441419] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 811.441419] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] raise exception.ResourceProviderSyncFailed() [ 811.441419] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 811.441419] env[69796]: ERROR nova.compute.manager [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] [ 811.443086] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.173s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.443334] env[69796]: DEBUG oslo_concurrency.lockutils [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.443543] env[69796]: INFO nova.compute.manager [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] [instance: 7f37f6c9-adba-4292-9d47-c455f77e539f] Successfully reverted task state from None on failure for instance. [ 811.445791] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.925s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.446138] env[69796]: DEBUG nova.objects.instance [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Lazy-loading 'resources' on Instance uuid 3020e505-513b-4b29-996a-6e70a212f508 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 811.452507] env[69796]: ERROR oslo_messaging.rpc.server [None req-323da3c5-99eb-4de3-b610-70ceac01bb43 tempest-ServersAaction247Test-1986289156 tempest-ServersAaction247Test-1986289156-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 811.452507] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 811.452507] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 811.452507] env[69796]: ERROR oslo_messaging.rpc.server yield [ 811.452507] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 811.452507] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 811.452507] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 811.452507] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 811.452507] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9225f508-656c-4189-9e57-f63645ea5cef"}]} [ 811.452507] env[69796]: ERROR oslo_messaging.rpc.server [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 811.452843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 811.453307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 
811.453726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 811.453726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 811.454132] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 811.454606] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 811.454606] env[69796]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 811.454606] env[69796]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 811.454606] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 811.454606] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 811.454606] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 811.454606] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 811.454606] env[69796]: ERROR oslo_messaging.rpc.server [ 811.480432] env[69796]: DEBUG nova.network.neutron [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.848569] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.949785] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.878s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.983115] env[69796]: INFO nova.compute.manager [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: 10c782a6-6507-482e-8671-2278375a68fc] Took 1.02 seconds to deallocate network for instance. 
[ 811.990951] env[69796]: DEBUG nova.scheduler.client.report [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 812.012909] env[69796]: DEBUG nova.scheduler.client.report [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 812.012909] env[69796]: DEBUG nova.compute.provider_tree [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 812.028108] env[69796]: DEBUG nova.scheduler.client.report [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 812.051676] env[69796]: DEBUG nova.scheduler.client.report [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 812.104317] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.104556] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 
tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.104775] env[69796]: INFO nova.compute.manager [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Attaching volume 84c4c325-a200-464b-96ea-621279364fc9 to /dev/sdb [ 812.156655] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97a0965-b7b2-40c5-aeff-2b4fa2772cc2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.169343] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf55af81-5264-4515-abd8-12172b6d1136 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.184174] env[69796]: DEBUG nova.virt.block_device [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Updating existing volume attachment record: 835b225d-de15-4ecc-99d9-e73c653c0307 {{(pid=69796) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 812.547812] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5e123e-a789-4d5e-bc78-eda9da2de6bc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.553555] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfabd6b-3deb-4468-910a-22c08c15ed72 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.588033] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3504abc-85f1-413b-a677-3a6be62e173b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.597227] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dab3c9c-5db7-4899-97ed-4e5d38e085ff {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.613182] env[69796]: DEBUG nova.compute.provider_tree [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 812.885139] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b06e3c02-7d6e-404f-a908-9658c5890eee 
tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Acquiring lock "836605ee-50cb-48b0-ba2e-33db3832f8ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.885139] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Lock "836605ee-50cb-48b0-ba2e-33db3832f8ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.885139] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Acquiring lock "836605ee-50cb-48b0-ba2e-33db3832f8ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.885139] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Lock "836605ee-50cb-48b0-ba2e-33db3832f8ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.885486] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Lock "836605ee-50cb-48b0-ba2e-33db3832f8ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.887317] env[69796]: INFO nova.compute.manager [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Terminating instance [ 812.929668] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Acquiring lock "f6bd68f4-3eb2-4203-bc00-2a5c7927cfac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.930104] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Lock "f6bd68f4-3eb2-4203-bc00-2a5c7927cfac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.040726] env[69796]: INFO nova.scheduler.client.report [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 
tempest-ListImageFiltersTestJSON-94860031-project-member] Deleted allocations for instance 10c782a6-6507-482e-8671-2278375a68fc [ 813.138136] env[69796]: ERROR nova.scheduler.client.report [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [req-df35df7f-9b99-4fa8-bcad-c572b13588c9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-df35df7f-9b99-4fa8-bcad-c572b13588c9"}]} [ 813.138593] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.693s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.139253] env[69796]: ERROR nova.compute.manager [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
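Annotation (not part of the captured log): the 400 from placement above is a schema failure, not a transport error. The compute node reported DISK_GB with max_unit=0, while placement's inventory schema (quoted in the error detail) requires max_unit >= 1. A minimal sketch reproducing that kind of server-side check with the jsonschema library; the schema fragment is copied from the 400 response, the rest of the payload is trimmed for illustration:

import jsonschema

# Fragment of the placement inventory schema quoted in the error detail above.
INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "maximum": 2147483647,
                            "minimum": 1,  # the constraint violated by DISK_GB
                        },
                    },
                },
            },
        },
    },
}

# Payload mirroring the DISK_GB record the resource tracker tried to PUT.
payload = {"inventories": {"DISK_GB": {"total": 400, "max_unit": 0}}}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    print(exc.message)  # -> "0 is less than the minimum of 1"

Until the compute node stops reporting a zero max_unit for DISK_GB, every inventory sync against provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 will fail the same way, which is why the error repeats below.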
[ 813.139253] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] Traceback (most recent call last): [ 813.139253] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 813.139253] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] yield [ 813.139253] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 813.139253] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] self.set_inventory_for_provider( [ 813.139253] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 813.139253] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 813.139676] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-df35df7f-9b99-4fa8-bcad-c572b13588c9"}]} [ 813.139676] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] [ 813.139676] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] During handling of the above exception, another exception occurred: [ 813.139676] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] [ 813.139676] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] Traceback (most recent call last): [ 813.139676] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 813.139676] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] self._delete_instance(context, instance, bdms) [ 813.139676] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 813.139676] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] self._complete_deletion(context, instance) [ 813.139986] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 813.139986] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] self._update_resource_tracker(context, instance) [ 813.139986] env[69796]: ERROR nova.compute.manager [instance: 
3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 813.139986] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] self.rt.update_usage(context, instance, instance.node) [ 813.139986] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 813.139986] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] return f(*args, **kwargs) [ 813.139986] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 813.139986] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] self._update(context.elevated(), self.compute_nodes[nodename]) [ 813.139986] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 813.139986] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] self._update_to_placement(context, compute_node, startup) [ 813.139986] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 813.139986] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 813.140260] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 813.140260] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] return attempt.get(self._wrap_exception) [ 813.140260] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 813.140260] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] six.reraise(self.value[0], self.value[1], self.value[2]) [ 813.140260] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 813.140260] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] raise value [ 813.140260] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 813.140260] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 813.140260] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 813.140260] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] self.reportclient.update_from_provider_tree( [ 813.140260] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 813.140260] 
env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] with catch_all(pd.uuid): [ 813.140260] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 813.140556] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] self.gen.throw(typ, value, traceback) [ 813.140556] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 813.140556] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] raise exception.ResourceProviderSyncFailed() [ 813.140556] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 813.140556] env[69796]: ERROR nova.compute.manager [instance: 3020e505-513b-4b29-996a-6e70a212f508] [ 813.141562] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.581s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.146130] env[69796]: INFO nova.compute.claims [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 813.391717] env[69796]: DEBUG nova.compute.manager [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Start destroying the instance on the hypervisor. 
{{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 813.392130] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 813.394113] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ffa590-f539-4d88-845b-a79d72bb2ce7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.405017] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 813.405429] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9861f2c-de82-4ce1-959a-10eae63e658f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.414739] env[69796]: DEBUG oslo_vmware.api [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Waiting for the task: (returnval){ [ 813.414739] env[69796]: value = "task-4234388" [ 813.414739] env[69796]: _type = "Task" [ 813.414739] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.428284] env[69796]: DEBUG oslo_vmware.api [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234388, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.463788] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.552112] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e514ec6d-5720-4b36-9e11-89eecf1ee836 tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Lock "10c782a6-6507-482e-8671-2278375a68fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.140s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.648321] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Lock "3020e505-513b-4b29-996a-6e70a212f508" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.369s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.933452] env[69796]: DEBUG oslo_vmware.api [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234388, 'name': PowerOffVM_Task, 'duration_secs': 0.219286} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.933844] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 813.934472] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 813.934789] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-672c165d-f122-4c4b-9372-d661ce566241 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.016212] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 814.018951] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Deleting contents of the VM from datastore datastore1 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 814.020285] env[69796]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Deleting the datastore file [datastore1] 836605ee-50cb-48b0-ba2e-33db3832f8ba {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 814.020732] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ede036df-073e-438f-96aa-c70cc0ce26f2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.029368] env[69796]: DEBUG oslo_vmware.api [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Waiting for the task: (returnval){ [ 814.029368] env[69796]: value = "task-4234390" [ 814.029368] env[69796]: _type = "Task" [ 814.029368] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.045923] env[69796]: DEBUG oslo_vmware.api [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234390, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.056209] env[69796]: DEBUG nova.compute.manager [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 814.185245] env[69796]: DEBUG nova.scheduler.client.report [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 814.207542] env[69796]: DEBUG nova.scheduler.client.report [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 814.207542] env[69796]: DEBUG nova.compute.provider_tree [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 814.226987] env[69796]: DEBUG nova.scheduler.client.report [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 814.255274] env[69796]: DEBUG nova.scheduler.client.report [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 814.542731] env[69796]: DEBUG oslo_vmware.api [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Task: {'id': task-4234390, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155295} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.546470] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 814.546470] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Deleted contents of the VM from datastore datastore1 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 814.546470] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 814.546861] env[69796]: INFO nova.compute.manager [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Took 1.15 seconds to destroy the instance on the hypervisor. [ 814.547331] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 814.548178] env[69796]: DEBUG nova.compute.manager [-] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 814.548336] env[69796]: DEBUG nova.network.neutron [-] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 814.587540] env[69796]: DEBUG oslo_concurrency.lockutils [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.769034] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db9ccda-1cd5-45e0-a332-fca9e10c59c7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.783991] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf07c54-0ac6-4a5e-8225-6d7506f07441 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.823845] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c4b288-9158-4846-9a09-240c5a188a7c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.831411] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd2adde-1f79-4379-be4b-08adcb58eb6b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.854867] env[69796]: DEBUG nova.compute.provider_tree [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 814.893278] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Acquiring lock "568ebebb-730e-40c4-a1a3-d03d7d4e5a85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.893445] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Lock 
"568ebebb-730e-40c4-a1a3-d03d7d4e5a85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.170136] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.388019] env[69796]: ERROR nova.scheduler.client.report [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [req-780b1053-ce35-4a24-85d0-22c0116ce046] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-780b1053-ce35-4a24-85d0-22c0116ce046"}]} [ 815.388019] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.244s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.388304] env[69796]: ERROR nova.compute.manager [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 815.388304] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Traceback (most recent call last): [ 815.388304] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 815.388304] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] yield [ 815.388304] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 815.388304] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] self.set_inventory_for_provider( [ 815.388304] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 815.388304] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 815.388491] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-780b1053-ce35-4a24-85d0-22c0116ce046"}]} [ 815.388491] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] [ 815.388491] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] During handling of the above exception, another exception occurred: [ 815.388491] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] [ 815.388491] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Traceback (most recent call last): [ 815.388491] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 815.388491] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] with self.rt.instance_claim(context, instance, node, allocs, [ 815.388491] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 815.388491] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] return f(*args, **kwargs) [ 815.388711] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 815.388711] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] self._update(elevated, cn) [ 815.388711] env[69796]: ERROR nova.compute.manager [instance: 
0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 815.388711] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] self._update_to_placement(context, compute_node, startup) [ 815.388711] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 815.388711] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 815.388711] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 815.388711] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] return attempt.get(self._wrap_exception) [ 815.388711] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 815.388711] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] six.reraise(self.value[0], self.value[1], self.value[2]) [ 815.388711] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 815.388711] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] raise value [ 815.388711] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 815.388996] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 815.388996] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 815.388996] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] self.reportclient.update_from_provider_tree( [ 815.388996] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 815.388996] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] with catch_all(pd.uuid): [ 815.388996] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 815.388996] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] self.gen.throw(typ, value, traceback) [ 815.388996] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 815.388996] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] raise exception.ResourceProviderSyncFailed() [ 815.388996] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 815.388996] env[69796]: ERROR nova.compute.manager [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] [ 815.389249] env[69796]: DEBUG nova.compute.utils [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 815.389249] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.194s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.389249] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.389249] env[69796]: INFO nova.compute.manager [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] [instance: 8b103adc-9903-406f-8fd1-e193e00cde11] Successfully reverted task state from None on failure for instance. [ 815.391351] env[69796]: DEBUG oslo_concurrency.lockutils [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.967s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.392803] env[69796]: INFO nova.compute.claims [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 815.396242] env[69796]: ERROR oslo_messaging.rpc.server [None req-a510478c-ae1e-4f67-bec1-42de05d27486 tempest-TenantUsagesTestJSON-1751090999 tempest-TenantUsagesTestJSON-1751090999-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
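Annotation: the oslo_messaging traceback that follows repeatedly enters oslo_utils.excutils.save_and_reraise_exception() / force_reraise(). That context manager is what lets each compute-manager layer run its cleanup (reverting task state, emitting the error notification seen above) and still re-raise the original ResourceProviderSyncFailed unchanged to the RPC server. A minimal usage sketch; the function names are illustrative, not Nova's:

from oslo_utils import excutils


def delete_instance(do_delete, revert_task_state):
    try:
        do_delete()
    except Exception:
        # The original exception and traceback are captured here and re-raised
        # when the with-block exits, even though cleanup code runs in between.
        with excutils.save_and_reraise_exception():
            revert_task_state()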
[ 815.396242] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 815.396242] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 815.396242] env[69796]: ERROR oslo_messaging.rpc.server yield [ 815.396242] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 815.396242] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 815.396242] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 815.396242] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 815.396242] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c858343c-cbad-4758-a435-c107eeadecab"}]} [ 815.396242] env[69796]: ERROR oslo_messaging.rpc.server [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 815.396561] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 815.396935] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 
815.397574] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 815.397574] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 815.397988] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 815.398715] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 815.398715] env[69796]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 815.398715] env[69796]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 815.398715] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 815.398715] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 815.398715] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 815.398715] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 815.398715] env[69796]: ERROR oslo_messaging.rpc.server [ 815.398715] env[69796]: DEBUG nova.compute.manager [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Build of instance 0067f7d5-8349-473d-b0e8-e396026b2393 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 815.398715] env[69796]: DEBUG nova.compute.manager [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 815.399349] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Acquiring lock "refresh_cache-0067f7d5-8349-473d-b0e8-e396026b2393" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.399349] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Acquired lock "refresh_cache-0067f7d5-8349-473d-b0e8-e396026b2393" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 815.399349] env[69796]: DEBUG nova.network.neutron [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 815.659353] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Acquiring lock "3cebd244-f9e7-4360-8249-4e1720c4d557" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.659578] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Lock "3cebd244-f9e7-4360-8249-4e1720c4d557" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.931876] env[69796]: DEBUG nova.network.neutron [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 815.973440] env[69796]: DEBUG nova.compute.manager [req-7ba43e2c-d630-44e0-841a-bde4f1fd024a req-ac18709c-e361-4adb-b730-78b6e5a0fd1d service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Received event network-vif-deleted-65b8c00f-8a84-4930-a8b4-c7a8e994421b {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 815.973869] env[69796]: INFO nova.compute.manager [req-7ba43e2c-d630-44e0-841a-bde4f1fd024a req-ac18709c-e361-4adb-b730-78b6e5a0fd1d service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Neutron deleted interface 65b8c00f-8a84-4930-a8b4-c7a8e994421b; detaching it from the instance and deleting it from the info cache [ 815.974375] env[69796]: DEBUG nova.network.neutron [req-7ba43e2c-d630-44e0-841a-bde4f1fd024a req-ac18709c-e361-4adb-b730-78b6e5a0fd1d service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.052076] env[69796]: DEBUG nova.network.neutron [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.082681] env[69796]: DEBUG nova.network.neutron [-] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.429429] env[69796]: DEBUG nova.scheduler.client.report [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 816.445770] env[69796]: DEBUG nova.scheduler.client.report [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 816.446192] env[69796]: DEBUG nova.compute.provider_tree [None 
req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 816.459616] env[69796]: DEBUG nova.scheduler.client.report [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 816.479761] env[69796]: DEBUG nova.scheduler.client.report [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 816.482993] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-975c2f3b-4f27-4ad5-9b07-bd5e32902264 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.496078] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8182db-8185-4ea8-8ea5-81ec36ed2a54 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.528638] env[69796]: DEBUG nova.compute.manager [req-7ba43e2c-d630-44e0-841a-bde4f1fd024a req-ac18709c-e361-4adb-b730-78b6e5a0fd1d service nova] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Detach interface failed, port_id=65b8c00f-8a84-4930-a8b4-c7a8e994421b, reason: Instance 836605ee-50cb-48b0-ba2e-33db3832f8ba could not be found. {{(pid=69796) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 816.554535] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Releasing lock "refresh_cache-0067f7d5-8349-473d-b0e8-e396026b2393" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.554655] env[69796]: DEBUG nova.compute.manager [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 816.554810] env[69796]: DEBUG nova.compute.manager [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 816.554997] env[69796]: DEBUG nova.network.neutron [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 816.582265] env[69796]: DEBUG nova.network.neutron [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 816.586208] env[69796]: INFO nova.compute.manager [-] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Took 2.04 seconds to deallocate network for instance. [ 816.749413] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Volume attach. Driver type: vmdk {{(pid=69796) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 816.749950] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837815', 'volume_id': '84c4c325-a200-464b-96ea-621279364fc9', 'name': 'volume-84c4c325-a200-464b-96ea-621279364fc9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9a0e9a08-1176-4f88-bbcd-f0f52d3d7714', 'attached_at': '', 'detached_at': '', 'volume_id': '84c4c325-a200-464b-96ea-621279364fc9', 'serial': '84c4c325-a200-464b-96ea-621279364fc9'} {{(pid=69796) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 816.751267] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ba9d6e-f494-4259-915d-6f363fc4ab61 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.783835] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc675da6-b491-4ddf-8e93-82743fd106bb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.814898] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Reconfiguring VM instance instance-0000000d to attach disk 
[localhost-esx-install-datastore (2)] volume-84c4c325-a200-464b-96ea-621279364fc9/volume-84c4c325-a200-464b-96ea-621279364fc9.vmdk or device None with type thin {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 816.818265] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5824227c-515d-4a90-a09e-ab6ed588ea6f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.838326] env[69796]: DEBUG oslo_vmware.api [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ [ 816.838326] env[69796]: value = "task-4234393" [ 816.838326] env[69796]: _type = "Task" [ 816.838326] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.849763] env[69796]: DEBUG oslo_vmware.api [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234393, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.085980] env[69796]: DEBUG nova.network.neutron [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.093971] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.104091] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57df399f-153f-45ad-89c8-311e2046e560 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.117693] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f557b8-a2b9-4142-8847-34084bcde902 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.151811] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6533f0bd-3e61-459c-b67d-3be3fdc1b8a9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.165189] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Acquiring lock "0bbd7678-014c-4f77-8608-277bce12410d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.165189] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aae675b9-785b-4468-a346-265fd7563966 
tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Lock "0bbd7678-014c-4f77-8608-277bce12410d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.166105] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6829bd67-3da5-41c2-affc-26e2c2aa1468 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.184186] env[69796]: DEBUG nova.compute.provider_tree [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 817.354699] env[69796]: DEBUG oslo_vmware.api [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234393, 'name': ReconfigVM_Task, 'duration_secs': 0.236142} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.355078] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Reconfigured VM instance instance-0000000d to attach disk [localhost-esx-install-datastore (2)] volume-84c4c325-a200-464b-96ea-621279364fc9/volume-84c4c325-a200-464b-96ea-621279364fc9.vmdk or device None with type thin {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 817.359931] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5e3077d-dd23-47dd-b0fc-48b459b3afb9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.376066] env[69796]: DEBUG oslo_vmware.api [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ [ 817.376066] env[69796]: value = "task-4234395" [ 817.376066] env[69796]: _type = "Task" [ 817.376066] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.385496] env[69796]: DEBUG oslo_vmware.api [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234395, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.591712] env[69796]: INFO nova.compute.manager [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] [instance: 0067f7d5-8349-473d-b0e8-e396026b2393] Took 1.04 seconds to deallocate network for instance. [ 817.712204] env[69796]: ERROR nova.scheduler.client.report [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [req-8b870502-acb6-4272-849e-ccf3b87f64b8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-8b870502-acb6-4272-849e-ccf3b87f64b8"}]} [ 817.712675] env[69796]: DEBUG oslo_concurrency.lockutils [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.321s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.713409] env[69796]: ERROR nova.compute.manager [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 817.713409] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Traceback (most recent call last): [ 817.713409] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 817.713409] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] yield [ 817.713409] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 817.713409] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] self.set_inventory_for_provider( [ 817.713409] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 817.713409] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 817.713703] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-8b870502-acb6-4272-849e-ccf3b87f64b8"}]} [ 817.713703] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] [ 817.713703] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] During handling of the above exception, another exception occurred: [ 817.713703] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] [ 817.713703] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Traceback (most recent call last): [ 817.713703] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 817.713703] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] with self.rt.instance_claim(context, instance, node, allocs, [ 817.713703] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 817.713703] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] return f(*args, **kwargs) [ 817.713953] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 817.713953] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] self._update(elevated, cn) [ 817.713953] env[69796]: ERROR nova.compute.manager [instance: 
df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 817.713953] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] self._update_to_placement(context, compute_node, startup) [ 817.713953] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 817.713953] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 817.713953] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 817.713953] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] return attempt.get(self._wrap_exception) [ 817.713953] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 817.713953] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] six.reraise(self.value[0], self.value[1], self.value[2]) [ 817.713953] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 817.713953] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] raise value [ 817.713953] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 817.714278] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 817.714278] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 817.714278] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] self.reportclient.update_from_provider_tree( [ 817.714278] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 817.714278] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] with catch_all(pd.uuid): [ 817.714278] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 817.714278] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] self.gen.throw(typ, value, traceback) [ 817.714278] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 817.714278] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] raise exception.ResourceProviderSyncFailed() [ 817.714278] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 817.714278] env[69796]: ERROR nova.compute.manager [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] [ 817.715089] env[69796]: DEBUG nova.compute.utils [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 817.715834] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.388s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.717424] env[69796]: INFO nova.compute.claims [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 817.720229] env[69796]: DEBUG nova.compute.manager [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Build of instance df6e15b9-640f-40c2-a146-4361de14f8b0 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 817.720590] env[69796]: DEBUG nova.compute.manager [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 817.720917] env[69796]: DEBUG oslo_concurrency.lockutils [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Acquiring lock "refresh_cache-df6e15b9-640f-40c2-a146-4361de14f8b0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.720917] env[69796]: DEBUG oslo_concurrency.lockutils [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Acquired lock "refresh_cache-df6e15b9-640f-40c2-a146-4361de14f8b0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.721275] env[69796]: DEBUG nova.network.neutron [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 817.888176] env[69796]: DEBUG oslo_vmware.api [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234395, 'name': 
ReconfigVM_Task, 'duration_secs': 0.15745} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.888497] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837815', 'volume_id': '84c4c325-a200-464b-96ea-621279364fc9', 'name': 'volume-84c4c325-a200-464b-96ea-621279364fc9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9a0e9a08-1176-4f88-bbcd-f0f52d3d7714', 'attached_at': '', 'detached_at': '', 'volume_id': '84c4c325-a200-464b-96ea-621279364fc9', 'serial': '84c4c325-a200-464b-96ea-621279364fc9'} {{(pid=69796) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 818.261114] env[69796]: DEBUG nova.network.neutron [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.317013] env[69796]: DEBUG nova.network.neutron [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.638856] env[69796]: INFO nova.scheduler.client.report [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Deleted allocations for instance 0067f7d5-8349-473d-b0e8-e396026b2393 [ 818.759934] env[69796]: DEBUG nova.scheduler.client.report [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 818.778756] env[69796]: DEBUG nova.scheduler.client.report [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 818.779082] env[69796]: DEBUG nova.compute.provider_tree [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating inventory in ProviderTree for provider 
dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 818.796125] env[69796]: DEBUG nova.scheduler.client.report [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 818.822027] env[69796]: DEBUG nova.scheduler.client.report [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 818.822646] env[69796]: DEBUG oslo_concurrency.lockutils [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Releasing lock "refresh_cache-df6e15b9-640f-40c2-a146-4361de14f8b0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.822854] env[69796]: DEBUG nova.compute.manager [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 818.823064] env[69796]: DEBUG nova.compute.manager [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 818.823228] env[69796]: DEBUG nova.network.neutron [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 818.945890] env[69796]: DEBUG nova.objects.instance [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lazy-loading 'flavor' on Instance uuid 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 819.089750] env[69796]: DEBUG nova.network.neutron [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.150883] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8963a449-73c2-4a59-a92d-c3ec45ba3107 tempest-InstanceActionsNegativeTestJSON-1052599922 tempest-InstanceActionsNegativeTestJSON-1052599922-project-member] Lock "0067f7d5-8349-473d-b0e8-e396026b2393" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.245s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.392250] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba53fdb3-2047-4fbc-b065-814363bce665 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.399857] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85cb602f-8436-42fb-9f56-fec169a39e73 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.431796] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c3bdc3-fc07-4219-978f-aa907ce2a490 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.441021] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87947a6-e474-41db-9a9e-06807bbb851d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.459339] env[69796]: DEBUG nova.compute.provider_tree [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 819.460711] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b6c32b7d-4d9e-4a6e-b9a0-6c3007de4afd tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.356s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.596053] env[69796]: DEBUG nova.network.neutron [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.655811] env[69796]: DEBUG nova.compute.manager [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 819.991354] env[69796]: ERROR nova.scheduler.client.report [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [req-d44a088c-570b-402e-803e-1b9dff1b130d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-d44a088c-570b-402e-803e-1b9dff1b130d"}]} [ 819.991818] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.276s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.992406] env[69796]: ERROR nova.compute.manager [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 819.992406] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] Traceback (most recent call last): [ 819.992406] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 819.992406] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] yield [ 819.992406] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 819.992406] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] self.set_inventory_for_provider( [ 819.992406] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 819.992406] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 819.992641] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-d44a088c-570b-402e-803e-1b9dff1b130d"}]} [ 819.992641] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] [ 819.992641] env[69796]: ERROR nova.compute.manager 
[instance: f92627d1-b895-4564-b975-2a596b6dd814] During handling of the above exception, another exception occurred: [ 819.992641] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] [ 819.992641] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] Traceback (most recent call last): [ 819.992641] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 819.992641] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] with self.rt.instance_claim(context, instance, node, allocs, [ 819.992641] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 819.992641] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] return f(*args, **kwargs) [ 819.992953] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 819.992953] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] self._update(elevated, cn) [ 819.992953] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 819.992953] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] self._update_to_placement(context, compute_node, startup) [ 819.992953] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 819.992953] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 819.992953] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 819.992953] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] return attempt.get(self._wrap_exception) [ 819.992953] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 819.992953] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] six.reraise(self.value[0], self.value[1], self.value[2]) [ 819.992953] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 819.992953] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] raise value [ 819.992953] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 819.993249] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 819.993249] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 819.993249] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] self.reportclient.update_from_provider_tree( [ 819.993249] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 819.993249] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] with catch_all(pd.uuid): [ 819.993249] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 819.993249] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] self.gen.throw(typ, value, traceback) [ 819.993249] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 819.993249] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] raise exception.ResourceProviderSyncFailed() [ 819.993249] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 819.993249] env[69796]: ERROR nova.compute.manager [instance: f92627d1-b895-4564-b975-2a596b6dd814] [ 819.993493] env[69796]: DEBUG nova.compute.utils [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 819.994425] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.268s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.995162] env[69796]: DEBUG nova.objects.instance [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lazy-loading 'resources' on Instance uuid a4a16667-cd00-4850-9389-0bd57c7efd74 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 819.996130] env[69796]: DEBUG nova.compute.manager [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] Build of instance f92627d1-b895-4564-b975-2a596b6dd814 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 819.996547] env[69796]: DEBUG nova.compute.manager [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 819.996766] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "refresh_cache-f92627d1-b895-4564-b975-2a596b6dd814" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.996909] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquired lock "refresh_cache-f92627d1-b895-4564-b975-2a596b6dd814" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.997126] env[69796]: DEBUG nova.network.neutron [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.100123] env[69796]: INFO nova.compute.manager [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] [instance: df6e15b9-640f-40c2-a146-4361de14f8b0] Took 1.28 seconds to deallocate network for instance. 
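[editor's note] The repeated "was re-scheduled" and ResourceProviderSyncFailed entries above all trace back to the same 400 from placement: the compute node reports DISK_GB with max_unit=0, and placement's inventory schema requires max_unit >= 1 (the constraint {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} is quoted verbatim in the error detail). The snippet below is a minimal sketch, not Nova or placement code: it validates the DISK_GB inventory copied from the log against a schema fragment reconstructed from that error message, using the third-party jsonschema package, just to reproduce the same validation failure in isolation.

    # Minimal sketch (not Nova/placement code): reproduce the 400 above by
    # validating the DISK_GB inventory from the log against the max_unit
    # constraint quoted in the error detail. Requires the `jsonschema` package.
    import jsonschema

    # Only the fragment relevant here, reconstructed from the error message;
    # this is not placement's full inventory schema.
    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {
                                "type": "integer",
                                "maximum": 2147483647,
                                "minimum": 1,
                            },
                        },
                    },
                },
            },
        },
    }

    # Inventory payload as reported by the resource tracker in the log:
    # DISK_GB.max_unit is 0, which is exactly what placement rejects.
    payload = {
        "inventories": {
            "DISK_GB": {
                "total": 400,
                "reserved": 0,
                "min_unit": 1,
                "max_unit": 0,
                "step_size": 1,
                "allocation_ratio": 1.0,
            },
        },
    }

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.ValidationError as exc:
        # Prints "0 is less than the minimum of 1" - the same check that
        # produced the 400 in the log.
        print(exc.message)

Note that the periodic inventory refresh in the log still reads max_unit=1 for DISK_GB from placement, while the local provider tree keeps flipping it to 0; any remediation would have to keep the reported max_unit at 1 or more (or omit DISK_GB entirely) before the PUT, but that is an inference from these records, not something the log itself confirms.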
[ 820.141254] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.141530] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.192156] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.521201] env[69796]: DEBUG nova.scheduler.client.report [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 820.527552] env[69796]: DEBUG nova.network.neutron [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.538267] env[69796]: DEBUG nova.scheduler.client.report [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 820.538504] env[69796]: DEBUG nova.compute.provider_tree [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 820.559893] env[69796]: DEBUG nova.scheduler.client.report [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 820.585545] env[69796]: DEBUG nova.scheduler.client.report [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 820.647199] env[69796]: INFO nova.compute.manager [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Detaching volume 84c4c325-a200-464b-96ea-621279364fc9 [ 820.679635] env[69796]: DEBUG nova.network.neutron [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.708511] env[69796]: INFO nova.virt.block_device [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Attempting to 
driver detach volume 84c4c325-a200-464b-96ea-621279364fc9 from mountpoint /dev/sdb [ 820.709253] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Volume detach. Driver type: vmdk {{(pid=69796) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 820.709253] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837815', 'volume_id': '84c4c325-a200-464b-96ea-621279364fc9', 'name': 'volume-84c4c325-a200-464b-96ea-621279364fc9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9a0e9a08-1176-4f88-bbcd-f0f52d3d7714', 'attached_at': '', 'detached_at': '', 'volume_id': '84c4c325-a200-464b-96ea-621279364fc9', 'serial': '84c4c325-a200-464b-96ea-621279364fc9'} {{(pid=69796) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 820.710407] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00cf26f0-937c-4db9-80b4-8b99d1b28bde {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.740812] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30fb59fb-43fc-4ece-b81a-079a28b55740 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.750515] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ab29ff-e9a0-429b-893e-23432f2caa19 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.784654] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1026ecd1-1d6e-4167-ad69-7dbdf16c6bdf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.805575] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] The volume has not been displaced from its original location: [localhost-esx-install-datastore (2)] volume-84c4c325-a200-464b-96ea-621279364fc9/volume-84c4c325-a200-464b-96ea-621279364fc9.vmdk. No consolidation needed. 
{{(pid=69796) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 820.811275] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Reconfiguring VM instance instance-0000000d to detach disk 2001 {{(pid=69796) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 820.814029] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e25aa9d1-1fd5-4eab-b963-9e9f7d3d772c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.833295] env[69796]: DEBUG oslo_vmware.api [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ [ 820.833295] env[69796]: value = "task-4234397" [ 820.833295] env[69796]: _type = "Task" [ 820.833295] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.848958] env[69796]: DEBUG oslo_vmware.api [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234397, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.105222] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Acquiring lock "82c2040c-9ada-4d77-88b1-453545c66b61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.105477] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Lock "82c2040c-9ada-4d77-88b1-453545c66b61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.135262] env[69796]: INFO nova.scheduler.client.report [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Deleted allocations for instance df6e15b9-640f-40c2-a146-4361de14f8b0 [ 821.185625] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Releasing lock "refresh_cache-f92627d1-b895-4564-b975-2a596b6dd814" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.185862] env[69796]: DEBUG nova.compute.manager [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 821.186132] env[69796]: DEBUG nova.compute.manager [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 821.186354] env[69796]: DEBUG nova.network.neutron [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 821.207588] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e0c593-1bb7-4fe3-b4f0-0b6d07638fa2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.215656] env[69796]: DEBUG nova.network.neutron [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 821.218212] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bc5ab0-7f30-406e-89b4-3d34c3495bdf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.252213] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50228ef7-eaeb-4c33-89c6-7d3b3dc0cb65 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.261141] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda82f2f-1e09-4c4a-85b3-718e90e330d2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.278188] env[69796]: DEBUG nova.compute.provider_tree [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 821.344615] env[69796]: DEBUG oslo_vmware.api [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234397, 'name': ReconfigVM_Task, 'duration_secs': 0.20188} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.344826] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Reconfigured VM instance instance-0000000d to detach disk 2001 {{(pid=69796) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 821.349648] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94f7ed06-cdf1-46ee-94f5-12b0516469be {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.369901] env[69796]: DEBUG oslo_vmware.api [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ [ 821.369901] env[69796]: value = "task-4234398" [ 821.369901] env[69796]: _type = "Task" [ 821.369901] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.381093] env[69796]: DEBUG oslo_vmware.api [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234398, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.645177] env[69796]: DEBUG oslo_concurrency.lockutils [None req-465a125f-95e5-424d-931a-94510c20c4ea tempest-ListImageFiltersTestJSON-94860031 tempest-ListImageFiltersTestJSON-94860031-project-member] Lock "df6e15b9-640f-40c2-a146-4361de14f8b0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.076s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.722408] env[69796]: DEBUG nova.network.neutron [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.808982] env[69796]: ERROR nova.scheduler.client.report [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [req-c33f86e1-35c7-4889-a92a-18992172e433] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c33f86e1-35c7-4889-a92a-18992172e433"}]} [ 821.809380] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.815s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.809974] env[69796]: ERROR nova.compute.manager [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 821.809974] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Traceback (most recent call last): [ 821.809974] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 821.809974] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] yield [ 821.809974] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 821.809974] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] self.set_inventory_for_provider( [ 821.809974] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 821.809974] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 821.810209] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c33f86e1-35c7-4889-a92a-18992172e433"}]} [ 821.810209] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] [ 821.810209] env[69796]: ERROR 
nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] During handling of the above exception, another exception occurred: [ 821.810209] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] [ 821.810209] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Traceback (most recent call last): [ 821.810209] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 821.810209] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] self._delete_instance(context, instance, bdms) [ 821.810209] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 821.810209] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] self._complete_deletion(context, instance) [ 821.810506] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 821.810506] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] self._update_resource_tracker(context, instance) [ 821.810506] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 821.810506] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] self.rt.update_usage(context, instance, instance.node) [ 821.810506] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 821.810506] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] return f(*args, **kwargs) [ 821.810506] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 821.810506] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] self._update(context.elevated(), self.compute_nodes[nodename]) [ 821.810506] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 821.810506] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] self._update_to_placement(context, compute_node, startup) [ 821.810506] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 821.810506] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 821.810879] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 821.810879] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] return attempt.get(self._wrap_exception) [ 821.810879] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 821.810879] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] six.reraise(self.value[0], self.value[1], self.value[2]) [ 821.810879] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 821.810879] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] raise value [ 821.810879] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 821.810879] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 821.810879] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 821.810879] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] self.reportclient.update_from_provider_tree( [ 821.810879] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 821.810879] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] with catch_all(pd.uuid): [ 821.810879] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 821.811288] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] self.gen.throw(typ, value, traceback) [ 821.811288] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 821.811288] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] raise exception.ResourceProviderSyncFailed() [ 821.811288] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 821.811288] env[69796]: ERROR nova.compute.manager [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] [ 821.813040] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.229s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.813320] env[69796]: DEBUG oslo_concurrency.lockutils [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.813460] env[69796]: INFO nova.compute.manager [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] [instance: 38792225-b054-4c08-b3ec-51d46287b0f9] Successfully reverted task state from None on failure for instance. [ 821.821273] env[69796]: ERROR oslo_messaging.rpc.server [None req-fd110588-3c82-4d75-a125-e7d1029d2e3b tempest-ServersAdmin275Test-11799016 tempest-ServersAdmin275Test-11799016-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 821.821273] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 821.821273] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 821.821273] env[69796]: ERROR oslo_messaging.rpc.server yield [ 821.821273] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 821.821273] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 821.821273] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 821.821273] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 821.821273] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-cab7c74b-e124-448c-8d77-f2b344981dcb"}]} [ 821.821273] env[69796]: ERROR oslo_messaging.rpc.server [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call 
last): [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 821.821584] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 821.821977] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 821.823204] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 821.823629] 
env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 821.823629] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 821.823991] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 821.823991] env[69796]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 821.823991] env[69796]: ERROR oslo_messaging.rpc.server File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 821.823991] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 821.823991] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 821.823991] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 821.823991] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 821.823991] env[69796]: ERROR oslo_messaging.rpc.server [ 821.823991] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.930s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.823991] env[69796]: INFO nova.compute.claims [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 821.881057] env[69796]: DEBUG oslo_vmware.api [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234398, 'name': ReconfigVM_Task, 'duration_secs': 0.142104} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.881342] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837815', 'volume_id': '84c4c325-a200-464b-96ea-621279364fc9', 'name': 'volume-84c4c325-a200-464b-96ea-621279364fc9', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9a0e9a08-1176-4f88-bbcd-f0f52d3d7714', 'attached_at': '', 'detached_at': '', 'volume_id': '84c4c325-a200-464b-96ea-621279364fc9', 'serial': '84c4c325-a200-464b-96ea-621279364fc9'} {{(pid=69796) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 822.148223] env[69796]: DEBUG nova.compute.manager [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 822.226595] env[69796]: INFO nova.compute.manager [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: f92627d1-b895-4564-b975-2a596b6dd814] Took 1.04 seconds to deallocate network for instance. [ 822.331486] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "a4a16667-cd00-4850-9389-0bd57c7efd74" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.285s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.446570] env[69796]: DEBUG nova.objects.instance [None req-9f2f0695-9454-4db0-8c84-23470955cb1e tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lazy-loading 'flavor' on Instance uuid 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 822.680374] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.858933] env[69796]: DEBUG nova.scheduler.client.report [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 822.881250] env[69796]: DEBUG nova.scheduler.client.report [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Updating ProviderTree inventory for provider 
dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 822.881487] env[69796]: DEBUG nova.compute.provider_tree [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 822.899162] env[69796]: DEBUG nova.scheduler.client.report [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 822.923382] env[69796]: DEBUG nova.scheduler.client.report [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 823.179315] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Acquiring lock "d9b2601d-1ebb-4609-90f3-180adb00c6bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.179774] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Lock "d9b2601d-1ebb-4609-90f3-180adb00c6bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.262929] env[69796]: INFO nova.scheduler.client.report [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Deleted allocations for instance f92627d1-b895-4564-b975-2a596b6dd814 [ 823.468289] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9f2f0695-9454-4db0-8c84-23470955cb1e 
tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.324s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.561335] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be9047f-2ef1-406d-9ff4-be85b296d292 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.572412] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3905810-ab33-4199-91d2-1cef22ed804d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.605591] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6405fd4-4442-4cb9-88ba-3bdeeec263f5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.613912] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f48d0644-0847-4241-885d-a0a8da30dcfe {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.632144] env[69796]: DEBUG nova.compute.provider_tree [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 823.780108] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b9680286-6107-4b05-9a63-f4416e45fdef tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "f92627d1-b895-4564-b975-2a596b6dd814" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.800s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.846469] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.145282] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Acquiring lock "bec0df68-c72b-4ecd-9a03-c8bf02f8059e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
824.145282] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Lock "bec0df68-c72b-4ecd-9a03-c8bf02f8059e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.165348] env[69796]: ERROR nova.scheduler.client.report [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] [req-7b481e49-3a8e-4c69-a1f0-32ecf37c31e6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7b481e49-3a8e-4c69-a1f0-32ecf37c31e6"}]} [ 824.165792] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.166443] env[69796]: ERROR nova.compute.manager [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 824.166443] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] Traceback (most recent call last): [ 824.166443] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 824.166443] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] yield [ 824.166443] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 824.166443] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] self.set_inventory_for_provider( [ 824.166443] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 824.166443] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 824.166670] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7b481e49-3a8e-4c69-a1f0-32ecf37c31e6"}]} [ 824.166670] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] [ 824.166670] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] During handling of the above exception, another exception occurred: [ 824.166670] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] [ 824.166670] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] Traceback (most recent call last): [ 824.166670] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 824.166670] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] with self.rt.instance_claim(context, instance, node, allocs, [ 824.166670] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 824.166670] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] return f(*args, **kwargs) [ 824.166927] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 824.166927] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] self._update(elevated, cn) [ 824.166927] env[69796]: ERROR nova.compute.manager [instance: 
55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 824.166927] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] self._update_to_placement(context, compute_node, startup) [ 824.166927] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 824.166927] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 824.166927] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 824.166927] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] return attempt.get(self._wrap_exception) [ 824.166927] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 824.166927] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] six.reraise(self.value[0], self.value[1], self.value[2]) [ 824.166927] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 824.166927] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] raise value [ 824.166927] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 824.167254] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 824.167254] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 824.167254] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] self.reportclient.update_from_provider_tree( [ 824.167254] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 824.167254] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] with catch_all(pd.uuid): [ 824.167254] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 824.167254] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] self.gen.throw(typ, value, traceback) [ 824.167254] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 824.167254] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] raise exception.ResourceProviderSyncFailed() [ 824.167254] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 824.167254] env[69796]: ERROR nova.compute.manager [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] [ 824.168788] env[69796]: DEBUG nova.compute.utils [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 824.170094] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 21.873s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.170094] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.170094] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69796) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 824.170282] env[69796]: DEBUG oslo_concurrency.lockutils [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.308s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.171765] env[69796]: INFO nova.compute.claims [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.176848] env[69796]: DEBUG nova.compute.manager [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] Build of instance 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 824.177359] env[69796]: DEBUG nova.compute.manager [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 824.177594] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Acquiring lock "refresh_cache-55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.177745] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Acquired lock "refresh_cache-55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.177908] env[69796]: DEBUG nova.network.neutron [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 824.179531] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9dceee-af13-4da1-af66-875aaa1404be {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.189636] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16da4916-c48f-45d5-8f0d-8b87f1cc2fab {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.208031] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba83a8ca-1d9a-443a-8b57-5bc0e3e6c2f0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.215891] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c60518-2443-435a-8a99-194216f8b50c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.249328] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179990MB free_disk=0GB free_vcpus=48 pci_devices=None {{(pid=69796) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 824.249488] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.282563] env[69796]: DEBUG nova.compute.manager [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 
tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: f1f47f34-d16d-4eba-907f-08d707683941] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 824.714931] env[69796]: DEBUG nova.network.neutron [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.806810] env[69796]: DEBUG nova.network.neutron [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.812504] env[69796]: DEBUG oslo_concurrency.lockutils [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.123487] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "4de7ecb8-c591-430d-8e87-70749358f05d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.123726] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "4de7ecb8-c591-430d-8e87-70749358f05d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.214888] env[69796]: DEBUG nova.scheduler.client.report [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 825.247140] env[69796]: DEBUG nova.scheduler.client.report [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 825.247140] env[69796]: DEBUG 
nova.compute.provider_tree [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 825.261296] env[69796]: DEBUG nova.scheduler.client.report [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 825.284175] env[69796]: DEBUG nova.scheduler.client.report [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 825.315939] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Releasing lock "refresh_cache-55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.316377] env[69796]: DEBUG nova.compute.manager [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 825.316573] env[69796]: DEBUG nova.compute.manager [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] [instance: 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 825.908950] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a618f6-80ab-4aee-8b15-3ca6213059fc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.919919] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a754092-d1c7-4535-a5e4-604636e7af01 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.956714] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4349691-4254-4b23-88e8-29b3d2a6faf8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.965364] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8619999b-f1b1-421a-9c51-5238748e87be {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.980069] env[69796]: DEBUG nova.compute.provider_tree [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 826.356021] env[69796]: INFO nova.scheduler.client.report [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Deleted allocations for instance 55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2 [ 826.503023] env[69796]: ERROR nova.scheduler.client.report [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [req-15313a4e-04ad-4b47-83fa-3b6061219385] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-15313a4e-04ad-4b47-83fa-3b6061219385"}]} [ 826.503915] env[69796]: DEBUG oslo_concurrency.lockutils [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.333s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.504942] env[69796]: ERROR nova.compute.manager [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 826.504942] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Traceback (most recent call last): [ 826.504942] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 826.504942] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] yield [ 826.504942] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 826.504942] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] self.set_inventory_for_provider( [ 826.504942] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 826.504942] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 826.505360] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-15313a4e-04ad-4b47-83fa-3b6061219385"}]} [ 826.505360] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] [ 826.505360] env[69796]: ERROR nova.compute.manager [instance: 
a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] During handling of the above exception, another exception occurred: [ 826.505360] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] [ 826.505360] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Traceback (most recent call last): [ 826.505360] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 826.505360] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] with self.rt.instance_claim(context, instance, node, allocs, [ 826.505360] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 826.505360] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] return f(*args, **kwargs) [ 826.505657] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 826.505657] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] self._update(elevated, cn) [ 826.505657] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 826.505657] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] self._update_to_placement(context, compute_node, startup) [ 826.505657] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 826.505657] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 826.505657] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 826.505657] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] return attempt.get(self._wrap_exception) [ 826.505657] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 826.505657] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] six.reraise(self.value[0], self.value[1], self.value[2]) [ 826.505657] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 826.505657] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] raise value [ 826.505657] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 826.506026] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 826.506026] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 826.506026] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] self.reportclient.update_from_provider_tree( [ 826.506026] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 826.506026] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] with catch_all(pd.uuid): [ 826.506026] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 826.506026] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] self.gen.throw(typ, value, traceback) [ 826.506026] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 826.506026] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] raise exception.ResourceProviderSyncFailed() [ 826.506026] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 826.506026] env[69796]: ERROR nova.compute.manager [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] [ 826.507233] env[69796]: DEBUG nova.compute.utils [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 826.508901] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.283s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.510530] env[69796]: INFO nova.compute.claims [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.514525] env[69796]: DEBUG nova.compute.manager [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Build of instance a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 826.515479] env[69796]: DEBUG nova.compute.manager [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 826.515769] env[69796]: DEBUG oslo_concurrency.lockutils [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Acquiring lock "refresh_cache-a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.515954] env[69796]: DEBUG oslo_concurrency.lockutils [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Acquired lock "refresh_cache-a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.517128] env[69796]: DEBUG nova.network.neutron [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 826.523369] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "6237eecf-7560-45c1-9fcd-6bd2a0747e7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.523672] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "6237eecf-7560-45c1-9fcd-6bd2a0747e7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.863727] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8cf278a5-19eb-465e-901f-a1183fa395e9 tempest-ServersListShow298Test-1312071047 tempest-ServersListShow298Test-1312071047-project-member] Lock "55fb4b38-d8ec-45a4-89b8-8642ad5bb1a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.720s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.041766] env[69796]: DEBUG nova.network.neutron [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.104807] env[69796]: DEBUG nova.network.neutron [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.367260] env[69796]: DEBUG nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 827.550382] env[69796]: DEBUG nova.scheduler.client.report [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 827.565591] env[69796]: DEBUG nova.scheduler.client.report [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 827.565818] env[69796]: DEBUG nova.compute.provider_tree [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 827.580212] env[69796]: DEBUG nova.scheduler.client.report [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 827.607313] env[69796]: DEBUG nova.scheduler.client.report [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: 
HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 827.609687] env[69796]: DEBUG oslo_concurrency.lockutils [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Releasing lock "refresh_cache-a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.609892] env[69796]: DEBUG nova.compute.manager [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 827.610698] env[69796]: DEBUG nova.compute.manager [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 827.612813] env[69796]: DEBUG nova.network.neutron [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 827.630345] env[69796]: DEBUG nova.network.neutron [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.895857] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.133825] env[69796]: DEBUG nova.network.neutron [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.146091] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d716db3b-b1bd-4adc-8954-65275dabb2e7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.154353] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b539a64-5fb6-45ad-83f7-1a9f90d90c4d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.189786] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fffb96f-f14e-4dd2-9711-16eef97a40d0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.203187] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c74b8f7-90b2-43e5-9f3a-8fd96bdd6db3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.219327] env[69796]: DEBUG nova.compute.provider_tree [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 828.636223] env[69796]: INFO nova.compute.manager [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] [instance: a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d] Took 1.03 seconds to deallocate network for instance. 
[ 828.741510] env[69796]: ERROR nova.scheduler.client.report [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [req-64ed401f-4791-4db3-a91f-67d8fba3d6a1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-64ed401f-4791-4db3-a91f-67d8fba3d6a1"}]} [ 828.741510] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.742134] env[69796]: ERROR nova.compute.manager [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 828.742134] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Traceback (most recent call last): [ 828.742134] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 828.742134] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] yield [ 828.742134] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 828.742134] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] self.set_inventory_for_provider( [ 828.742134] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 828.742134] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 828.742381] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-64ed401f-4791-4db3-a91f-67d8fba3d6a1"}]} [ 828.742381] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] [ 828.742381] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] During handling of the above exception, another exception occurred: [ 828.742381] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] [ 828.742381] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Traceback (most recent call last): [ 828.742381] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 828.742381] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] with self.rt.instance_claim(context, instance, node, allocs, [ 828.742381] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 828.742381] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] return f(*args, **kwargs) [ 828.742656] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 828.742656] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] self._update(elevated, cn) [ 828.742656] env[69796]: ERROR nova.compute.manager [instance: 
834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 828.742656] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] self._update_to_placement(context, compute_node, startup) [ 828.742656] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 828.742656] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 828.742656] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 828.742656] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] return attempt.get(self._wrap_exception) [ 828.742656] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 828.742656] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] six.reraise(self.value[0], self.value[1], self.value[2]) [ 828.742656] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 828.742656] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] raise value [ 828.742656] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 828.743023] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 828.743023] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 828.743023] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] self.reportclient.update_from_provider_tree( [ 828.743023] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 828.743023] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] with catch_all(pd.uuid): [ 828.743023] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 828.743023] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] self.gen.throw(typ, value, traceback) [ 828.743023] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 828.743023] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] raise exception.ResourceProviderSyncFailed() [ 828.743023] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 828.743023] env[69796]: ERROR nova.compute.manager [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] [ 828.743343] env[69796]: DEBUG nova.compute.utils [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 828.744608] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.840s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.745806] env[69796]: INFO nova.compute.claims [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 828.748794] env[69796]: DEBUG nova.compute.manager [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Build of instance 834de465-9bef-4f8f-8bf6-9d39bc437f58 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 828.749221] env[69796]: DEBUG nova.compute.manager [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 828.749438] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Acquiring lock "refresh_cache-834de465-9bef-4f8f-8bf6-9d39bc437f58" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.749581] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Acquired lock "refresh_cache-834de465-9bef-4f8f-8bf6-9d39bc437f58" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.749730] env[69796]: DEBUG nova.network.neutron [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 829.280395] env[69796]: DEBUG nova.network.neutron [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] 
Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.371214] env[69796]: DEBUG nova.network.neutron [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.677623] env[69796]: INFO nova.scheduler.client.report [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Deleted allocations for instance a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d [ 829.784828] env[69796]: DEBUG nova.scheduler.client.report [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 829.802481] env[69796]: DEBUG nova.scheduler.client.report [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 829.802689] env[69796]: DEBUG nova.compute.provider_tree [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 829.814883] env[69796]: DEBUG nova.scheduler.client.report [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 829.835627] env[69796]: DEBUG nova.scheduler.client.report [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 829.877035] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Releasing lock "refresh_cache-834de465-9bef-4f8f-8bf6-9d39bc437f58" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.877035] env[69796]: DEBUG nova.compute.manager [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 829.877035] env[69796]: DEBUG nova.compute.manager [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 829.877035] env[69796]: DEBUG nova.network.neutron [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 829.897224] env[69796]: DEBUG nova.network.neutron [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.191043] env[69796]: DEBUG oslo_concurrency.lockutils [None req-31a23fd7-9720-48a8-a817-eb1c489593ed tempest-MigrationsAdminTest-949512837 tempest-MigrationsAdminTest-949512837-project-member] Lock "a2baa6a4-a3a2-475b-bef1-1c98dbe3c13d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.802s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.362019] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2014dcb6-33de-4323-83c2-7325e4d4ecb9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.372544] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f985b78-a916-410d-8a7e-c2b8eaa7306a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.405914] env[69796]: DEBUG nova.network.neutron [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.408206] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-015b26e2-337c-437b-b486-7faf821f5d08 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.417969] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359d9aff-f127-4ad0-b218-01a25bd07fbf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.434754] env[69796]: DEBUG nova.compute.provider_tree [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 830.694112] env[69796]: DEBUG nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 830.912410] env[69796]: INFO nova.compute.manager [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] [instance: 834de465-9bef-4f8f-8bf6-9d39bc437f58] Took 1.04 seconds to deallocate network for instance. 
[ 830.965764] env[69796]: ERROR nova.scheduler.client.report [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [req-c602c3c0-da41-4e3d-ac08-8fb031782e32] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c602c3c0-da41-4e3d-ac08-8fb031782e32"}]} [ 830.966220] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.222s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.966832] env[69796]: ERROR nova.compute.manager [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 830.966832] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Traceback (most recent call last): [ 830.966832] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 830.966832] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] yield [ 830.966832] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 830.966832] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] self.set_inventory_for_provider( [ 830.966832] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 830.966832] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 830.967219] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c602c3c0-da41-4e3d-ac08-8fb031782e32"}]} [ 830.967219] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] [ 830.967219] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] During handling of the above exception, another exception occurred: [ 830.967219] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] [ 830.967219] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Traceback (most recent call last): [ 830.967219] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 830.967219] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] with self.rt.instance_claim(context, instance, node, allocs, [ 830.967219] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 830.967219] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] return f(*args, **kwargs) [ 830.967695] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 830.967695] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] self._update(elevated, cn) [ 830.967695] env[69796]: ERROR nova.compute.manager [instance: 
95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 830.967695] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] self._update_to_placement(context, compute_node, startup) [ 830.967695] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 830.967695] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 830.967695] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 830.967695] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] return attempt.get(self._wrap_exception) [ 830.967695] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 830.967695] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] six.reraise(self.value[0], self.value[1], self.value[2]) [ 830.967695] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 830.967695] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] raise value [ 830.967695] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 830.968082] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 830.968082] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 830.968082] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] self.reportclient.update_from_provider_tree( [ 830.968082] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 830.968082] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] with catch_all(pd.uuid): [ 830.968082] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 830.968082] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] self.gen.throw(typ, value, traceback) [ 830.968082] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 830.968082] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] raise exception.ResourceProviderSyncFailed() [ 830.968082] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 830.968082] env[69796]: ERROR nova.compute.manager [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] [ 830.968372] env[69796]: DEBUG nova.compute.utils [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 830.968878] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.887s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.970467] env[69796]: INFO nova.compute.claims [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.974346] env[69796]: DEBUG nova.compute.manager [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Build of instance 95857f3f-1503-44d0-a1b3-d087bde80393 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 830.974817] env[69796]: DEBUG nova.compute.manager [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 830.975025] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Acquiring lock "refresh_cache-95857f3f-1503-44d0-a1b3-d087bde80393" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.975201] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Acquired lock "refresh_cache-95857f3f-1503-44d0-a1b3-d087bde80393" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.975369] env[69796]: DEBUG nova.network.neutron [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.220718] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.510341] env[69796]: DEBUG nova.network.neutron [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.647137] env[69796]: DEBUG nova.network.neutron [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.959551] env[69796]: INFO nova.scheduler.client.report [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Deleted allocations for instance 834de465-9bef-4f8f-8bf6-9d39bc437f58 [ 832.001222] env[69796]: DEBUG nova.scheduler.client.report [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 832.015820] env[69796]: DEBUG nova.scheduler.client.report [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 832.016058] env[69796]: DEBUG nova.compute.provider_tree [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 832.041568] env[69796]: DEBUG nova.scheduler.client.report [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 832.066537] env[69796]: DEBUG 
nova.scheduler.client.report [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 832.150473] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Releasing lock "refresh_cache-95857f3f-1503-44d0-a1b3-d087bde80393" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.150792] env[69796]: DEBUG nova.compute.manager [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 832.150988] env[69796]: DEBUG nova.compute.manager [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 832.151178] env[69796]: DEBUG nova.network.neutron [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 832.168381] env[69796]: DEBUG nova.network.neutron [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.472628] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f977276-f1d4-4579-9e81-4506b429fc22 tempest-ServerActionsTestJSON-2097779956 tempest-ServerActionsTestJSON-2097779956-project-member] Lock "834de465-9bef-4f8f-8bf6-9d39bc437f58" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.779s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.583292] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550af4e0-a601-423f-825f-5eb7e0117dc9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.598020] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad82037-ce5d-47e0-872d-fa822581e36d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.632399] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a821eb22-2d65-40fe-9f14-5626abaeb5ce {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.640707] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e122d41-f1f9-44fd-8784-07ed7a326cdc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.658242] env[69796]: DEBUG nova.compute.provider_tree [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 832.671062] env[69796]: DEBUG nova.network.neutron [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.975234] env[69796]: DEBUG nova.compute.manager [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 833.178062] env[69796]: INFO nova.compute.manager [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] [instance: 95857f3f-1503-44d0-a1b3-d087bde80393] Took 1.02 seconds to deallocate network for instance. 
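Every 400 response in this log traces back to the same Placement-side schema check: for each resource class, max_unit must be an integer between 1 and 2147483647, and the compute host keeps reporting DISK_GB with max_unit 0. The following is a minimal, illustrative sketch of that check using only the constraints quoted verbatim in the error detail above (the authoritative schema lives in the Placement service itself; the jsonschema usage here is an assumption made purely for demonstration):

# Illustrative reproduction of the inventory constraint that Placement is
# enforcing in the 400 responses above; the schema fragment below copies the
# constraints quoted in the error detail ("type": integer, minimum 1,
# maximum 2147483647 for max_unit).
import jsonschema

MAX_UNIT_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "minimum": 1,
                            "maximum": 2147483647,
                        },
                    },
                },
            },
        },
    },
}

# Mirrors the inventory the resource tracker keeps sending in this log:
# DISK_GB has total 400 but max_unit 0, which violates "minimum: 1".
payload = {
    "inventories": {
        "VCPU": {"total": 48, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "max_unit": 65530},
        "DISK_GB": {"total": 400, "max_unit": 0},
    }
}

try:
    jsonschema.validate(payload, MAX_UNIT_SCHEMA)
except jsonschema.ValidationError as exc:
    print(exc.message)  # -> "0 is less than the minimum of 1"

Note also the pattern visible in the surrounding entries: each inventory refresh pulls DISK_GB max_unit 1 back from Placement, but the next update pushed by the compute host reverts it to 0 and is rejected again, which is why every instance claim on this node ends in ResourceProviderSyncFailed and a re-schedule.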
[ 833.182534] env[69796]: ERROR nova.scheduler.client.report [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [req-2a720417-3a18-4165-a8d5-7f5f8dca4b07] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2a720417-3a18-4165-a8d5-7f5f8dca4b07"}]} [ 833.182827] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.214s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 833.183490] env[69796]: ERROR nova.compute.manager [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 833.183490] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Traceback (most recent call last): [ 833.183490] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 833.183490] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] yield [ 833.183490] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 833.183490] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] self.set_inventory_for_provider( [ 833.183490] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 833.183490] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 833.183731] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2a720417-3a18-4165-a8d5-7f5f8dca4b07"}]} [ 833.183731] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] [ 833.183731] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] During handling of the above exception, another exception occurred: [ 833.183731] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] [ 833.183731] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Traceback (most recent call last): [ 833.183731] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 833.183731] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] with self.rt.instance_claim(context, instance, node, allocs, [ 833.183731] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 833.183731] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] return f(*args, **kwargs) [ 833.183987] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 833.183987] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] self._update(elevated, cn) [ 833.183987] env[69796]: ERROR nova.compute.manager [instance: 
d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 833.183987] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] self._update_to_placement(context, compute_node, startup) [ 833.183987] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 833.183987] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 833.183987] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 833.183987] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] return attempt.get(self._wrap_exception) [ 833.183987] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 833.183987] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] six.reraise(self.value[0], self.value[1], self.value[2]) [ 833.183987] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 833.183987] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] raise value [ 833.183987] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 833.184738] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 833.184738] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 833.184738] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] self.reportclient.update_from_provider_tree( [ 833.184738] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 833.184738] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] with catch_all(pd.uuid): [ 833.184738] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 833.184738] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] self.gen.throw(typ, value, traceback) [ 833.184738] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 833.184738] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] raise exception.ResourceProviderSyncFailed() [ 833.184738] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 833.184738] env[69796]: ERROR nova.compute.manager [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] [ 833.185013] env[69796]: DEBUG nova.compute.utils [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 833.185440] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.338s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.187750] env[69796]: INFO nova.compute.claims [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.190826] env[69796]: DEBUG nova.compute.manager [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Build of instance d3620cfb-dd10-4276-b65d-b6041e83ac49 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 833.191037] env[69796]: DEBUG nova.compute.manager [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 833.191210] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Acquiring lock "refresh_cache-d3620cfb-dd10-4276-b65d-b6041e83ac49" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.191807] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Acquired lock "refresh_cache-d3620cfb-dd10-4276-b65d-b6041e83ac49" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.191807] env[69796]: DEBUG nova.network.neutron [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 833.373308] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Acquiring lock "09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.373308] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Lock "09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 833.499682] env[69796]: DEBUG oslo_concurrency.lockutils [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 833.725352] env[69796]: DEBUG nova.network.neutron [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 833.889776] env[69796]: DEBUG nova.network.neutron [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.087106] env[69796]: DEBUG oslo_concurrency.lockutils [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Acquiring lock "d3002060-482c-4307-845e-5f00b085d06b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.087106] env[69796]: DEBUG oslo_concurrency.lockutils [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Lock "d3002060-482c-4307-845e-5f00b085d06b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.220041] env[69796]: INFO nova.scheduler.client.report [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Deleted allocations for instance 95857f3f-1503-44d0-a1b3-d087bde80393 [ 834.226722] env[69796]: DEBUG nova.scheduler.client.report [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 834.252758] env[69796]: DEBUG nova.scheduler.client.report [None 
req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 834.253057] env[69796]: DEBUG nova.compute.provider_tree [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 834.267416] env[69796]: DEBUG nova.scheduler.client.report [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 834.288405] env[69796]: DEBUG nova.scheduler.client.report [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 834.395017] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Releasing lock "refresh_cache-d3620cfb-dd10-4276-b65d-b6041e83ac49" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.395017] env[69796]: DEBUG nova.compute.manager [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 834.395017] env[69796]: DEBUG nova.compute.manager [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 834.395017] env[69796]: DEBUG nova.network.neutron [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 834.414412] env[69796]: DEBUG nova.network.neutron [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 834.732044] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c879474e-f427-4361-8b45-40b47d65dc79 tempest-ServerAddressesTestJSON-1968688224 tempest-ServerAddressesTestJSON-1968688224-project-member] Lock "95857f3f-1503-44d0-a1b3-d087bde80393" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.622s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.782870] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40574105-ecfb-499a-b4ce-285038a264df {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.792679] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79f06c0-890f-483a-9af2-439a39fbcace {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.555965] env[69796]: DEBUG nova.network.neutron [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.557504] env[69796]: DEBUG nova.compute.manager [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 835.561072] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81023e9b-fec1-4da1-bf7d-afb87eaf99d8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.570647] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6221b0a8-5028-4c21-94c0-a688d84dfedf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.587274] env[69796]: DEBUG nova.compute.provider_tree [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 836.060997] env[69796]: INFO nova.compute.manager [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] [instance: d3620cfb-dd10-4276-b65d-b6041e83ac49] Took 1.67 seconds to deallocate network for instance. [ 836.088721] env[69796]: DEBUG oslo_concurrency.lockutils [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.116614] env[69796]: ERROR nova.scheduler.client.report [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [req-00446948-3b25-4d37-82ef-72286313fb1b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-00446948-3b25-4d37-82ef-72286313fb1b"}]} [ 836.116999] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.932s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.118015] env[69796]: ERROR nova.compute.manager [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 836.118015] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Traceback (most recent call last): [ 836.118015] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 836.118015] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] yield [ 836.118015] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 836.118015] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] self.set_inventory_for_provider( [ 836.118015] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 836.118015] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 836.118239] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-00446948-3b25-4d37-82ef-72286313fb1b"}]} [ 836.118239] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] [ 836.118239] env[69796]: ERROR 
nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] During handling of the above exception, another exception occurred: [ 836.118239] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] [ 836.118239] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Traceback (most recent call last): [ 836.118239] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 836.118239] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] with self.rt.instance_claim(context, instance, node, allocs, [ 836.118239] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 836.118239] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] return f(*args, **kwargs) [ 836.118564] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 836.118564] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] self._update(elevated, cn) [ 836.118564] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 836.118564] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] self._update_to_placement(context, compute_node, startup) [ 836.118564] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 836.118564] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 836.118564] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 836.118564] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] return attempt.get(self._wrap_exception) [ 836.118564] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 836.118564] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] six.reraise(self.value[0], self.value[1], self.value[2]) [ 836.118564] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 836.118564] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] raise value [ 836.118564] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 836.118930] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 836.118930] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 836.118930] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] self.reportclient.update_from_provider_tree( [ 836.118930] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 836.118930] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] with catch_all(pd.uuid): [ 836.118930] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 836.118930] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] self.gen.throw(typ, value, traceback) [ 836.118930] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 836.118930] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] raise exception.ResourceProviderSyncFailed() [ 836.118930] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 836.118930] env[69796]: ERROR nova.compute.manager [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] [ 836.119258] env[69796]: DEBUG nova.compute.utils [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 836.119865] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.656s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.120070] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.120241] env[69796]: INFO nova.compute.manager [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] [instance: f0d4f167-344a-4828-9f6e-8a62ed8e064d] Successfully reverted task state from None on failure for instance. 
[ 836.122392] env[69796]: DEBUG oslo_concurrency.lockutils [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.537s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.123847] env[69796]: INFO nova.compute.claims [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 836.126630] env[69796]: DEBUG nova.compute.manager [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Build of instance c5b49cf2-4316-43bf-90d3-8e6da14dc5d7 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 836.127055] env[69796]: DEBUG nova.compute.manager [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 836.127284] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Acquiring lock "refresh_cache-c5b49cf2-4316-43bf-90d3-8e6da14dc5d7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.127433] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Acquired lock "refresh_cache-c5b49cf2-4316-43bf-90d3-8e6da14dc5d7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.127592] env[69796]: DEBUG nova.network.neutron [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.129159] env[69796]: ERROR oslo_messaging.rpc.server [None req-2309303b-458d-4c5b-95ae-006e28afbf91 tempest-ServersAdminTestJSON-67121385 tempest-ServersAdminTestJSON-67121385-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 836.129159] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 836.129159] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 836.129159] env[69796]: ERROR oslo_messaging.rpc.server yield [ 836.129159] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 836.129159] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 836.129159] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 836.129159] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 836.129159] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-99ed3f50-8edc-4970-8489-82efa8356569"}]} [ 836.129159] env[69796]: ERROR oslo_messaging.rpc.server [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 836.129505] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 836.129927] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 
836.130357] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 836.130357] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 836.130776] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 836.131184] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 836.131184] env[69796]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 836.131184] env[69796]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 836.131184] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 836.131184] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 836.131184] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 836.131184] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 836.131184] env[69796]: ERROR oslo_messaging.rpc.server [ 836.651323] env[69796]: DEBUG nova.network.neutron [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 836.733443] env[69796]: DEBUG nova.network.neutron [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.100711] env[69796]: INFO nova.scheduler.client.report [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Deleted allocations for instance d3620cfb-dd10-4276-b65d-b6041e83ac49 [ 837.158931] env[69796]: DEBUG nova.scheduler.client.report [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 837.175116] env[69796]: DEBUG nova.scheduler.client.report [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 837.175427] env[69796]: DEBUG nova.compute.provider_tree [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 837.188422] env[69796]: DEBUG nova.scheduler.client.report [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 837.209486] env[69796]: DEBUG nova.scheduler.client.report [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 837.236052] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Releasing lock "refresh_cache-c5b49cf2-4316-43bf-90d3-8e6da14dc5d7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.236453] env[69796]: DEBUG nova.compute.manager [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 837.236665] env[69796]: DEBUG nova.compute.manager [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 837.236836] env[69796]: DEBUG nova.network.neutron [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 837.260492] env[69796]: DEBUG nova.network.neutron [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.614720] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3a8d1023-ecde-4fc2-8f31-3819ca0faf9c tempest-ServersTestMultiNic-1867870935 tempest-ServersTestMultiNic-1867870935-project-member] Lock "d3620cfb-dd10-4276-b65d-b6041e83ac49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.184s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.763479] env[69796]: DEBUG nova.network.neutron [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.793034] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142f72b0-8827-4377-a4a9-73336efc4535 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.804269] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8320835-707b-49d3-a517-f736b1820465 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.865467] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78f414a-f433-48ac-9097-1e1bf55c9f77 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.879946] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9504b7d2-2fc7-4250-8d06-a0de938fa33b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.906129] env[69796]: DEBUG nova.compute.provider_tree [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 838.123657] env[69796]: DEBUG nova.compute.manager [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 838.268168] env[69796]: INFO nova.compute.manager [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] [instance: c5b49cf2-4316-43bf-90d3-8e6da14dc5d7] Took 1.03 seconds to deallocate network for instance. 
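Annotation: every 400 response from placement in this trace is the same payload problem. The resource tracker pushes a DISK_GB inventory whose max_unit is 0, and the placement inventory schema (quoted verbatim in the error detail above) requires max_unit to be an integer between 1 and 2147483647. The sketch below reproduces just that validation step outside of Nova; it is a simplified assumption reconstructed from the error text, not Nova or placement code, and it assumes the third-party jsonschema package is installed. The schema fragment and the DISK_GB values are copied from the log records above.

    # Minimal sketch (assumption, not Nova/placement code): reproduce the
    # "0 is less than the minimum of 1" rejection seen in the 400 responses.
    import jsonschema

    # Simplified fragment of the inventory schema quoted in the error detail:
    # each resource-class entry's max_unit must be an integer in [1, 2147483647].
    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {
                                "type": "integer",
                                "minimum": 1,
                                "maximum": 2147483647,
                            },
                        },
                    },
                },
            },
        },
    }

    # The DISK_GB inventory the compute host reports in the failing updates
    # above: total 400 GB but max_unit 0, which violates the schema.
    payload = {
        "inventories": {
            "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                        "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0},
        },
    }

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.ValidationError as exc:
        print(exc.message)  # -> 0 is less than the minimum of 1

Note that the inventory read back from placement during the refreshes above still shows DISK_GB max_unit 1, while the value recomputed from the compute host is 0; only the host-initiated updates fail, which is why every instance_claim and update_usage path ends in ResourceProviderSyncFailed. A plausible remediation would be for the driver to report a max_unit of at least 1 (or omit DISK_GB when no usable space remains), but that is an assumption beyond what this log shows.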
[ 838.435957] env[69796]: ERROR nova.scheduler.client.report [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] [req-26d6768a-5950-423b-bce7-aec90964883c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-26d6768a-5950-423b-bce7-aec90964883c"}]} [ 838.435957] env[69796]: DEBUG oslo_concurrency.lockutils [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.313s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.436094] env[69796]: ERROR nova.compute.manager [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 838.436094] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] Traceback (most recent call last): [ 838.436094] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 838.436094] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] yield [ 838.436094] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 838.436094] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] self.set_inventory_for_provider( [ 838.436094] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 838.436094] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 838.436332] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-26d6768a-5950-423b-bce7-aec90964883c"}]} [ 838.436332] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] [ 838.436332] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] During handling of the above exception, another exception occurred: [ 838.436332] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] [ 838.436332] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] Traceback (most recent call last): [ 838.436332] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 838.436332] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] with self.rt.instance_claim(context, instance, node, allocs, [ 838.436332] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 838.436332] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] return f(*args, **kwargs) [ 838.436612] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 838.436612] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] self._update(elevated, cn) [ 838.436612] env[69796]: ERROR nova.compute.manager [instance: 
6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 838.436612] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] self._update_to_placement(context, compute_node, startup) [ 838.436612] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 838.436612] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 838.436612] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 838.436612] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] return attempt.get(self._wrap_exception) [ 838.436612] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 838.436612] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] six.reraise(self.value[0], self.value[1], self.value[2]) [ 838.436612] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 838.436612] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] raise value [ 838.436612] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 838.436962] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 838.436962] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 838.436962] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] self.reportclient.update_from_provider_tree( [ 838.436962] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 838.436962] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] with catch_all(pd.uuid): [ 838.436962] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 838.436962] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] self.gen.throw(typ, value, traceback) [ 838.436962] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 838.436962] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] raise exception.ResourceProviderSyncFailed() [ 838.436962] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 838.436962] env[69796]: ERROR nova.compute.manager [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] [ 838.437255] env[69796]: DEBUG nova.compute.utils [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 838.438630] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.269s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.439900] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.439900] env[69796]: INFO nova.compute.manager [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] [instance: 3020e505-513b-4b29-996a-6e70a212f508] Successfully reverted task state from None on failure for instance. [ 838.442038] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.348s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.442038] env[69796]: DEBUG nova.objects.instance [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Lazy-loading 'resources' on Instance uuid 836605ee-50cb-48b0-ba2e-33db3832f8ba {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 838.447927] env[69796]: DEBUG nova.compute.manager [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] Build of instance 6bbe1196-a61f-4260-bddd-64f578acf1dc was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 838.447927] env[69796]: DEBUG nova.compute.manager [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 838.447927] env[69796]: DEBUG oslo_concurrency.lockutils [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Acquiring lock "refresh_cache-6bbe1196-a61f-4260-bddd-64f578acf1dc" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.447927] env[69796]: DEBUG oslo_concurrency.lockutils [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Acquired lock "refresh_cache-6bbe1196-a61f-4260-bddd-64f578acf1dc" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.448152] env[69796]: DEBUG nova.network.neutron [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 838.449069] env[69796]: ERROR oslo_messaging.rpc.server [None req-6f776a54-b38b-4339-b105-db70f2488ca0 tempest-ServerAddressesNegativeTestJSON-668879773 tempest-ServerAddressesNegativeTestJSON-668879773-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 838.449069] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 838.449069] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 838.449069] env[69796]: ERROR oslo_messaging.rpc.server yield [ 838.449069] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 838.449069] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 838.449069] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 838.449069] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 838.449069] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-df35df7f-9b99-4fa8-bcad-c572b13588c9"}]} [ 838.449069] env[69796]: ERROR oslo_messaging.rpc.server [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 838.449408] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 838.449843] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 
838.450299] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 838.450299] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 838.450713] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 838.451179] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 838.451179] env[69796]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 838.451179] env[69796]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 838.451179] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 838.451179] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 838.451179] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 838.451179] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 838.451179] env[69796]: ERROR oslo_messaging.rpc.server [ 838.650293] env[69796]: DEBUG oslo_concurrency.lockutils [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.968105] env[69796]: DEBUG nova.scheduler.client.report [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 838.979235] env[69796]: DEBUG nova.network.neutron [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 838.984236] env[69796]: DEBUG nova.scheduler.client.report [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 838.984509] env[69796]: DEBUG nova.compute.provider_tree [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 839.003617] env[69796]: DEBUG nova.scheduler.client.report [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Refreshing aggregate associations for resource 
provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 839.032395] env[69796]: DEBUG nova.scheduler.client.report [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 839.075521] env[69796]: DEBUG nova.network.neutron [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.308907] env[69796]: INFO nova.scheduler.client.report [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Deleted allocations for instance c5b49cf2-4316-43bf-90d3-8e6da14dc5d7 [ 839.541221] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6851cf39-abe5-4658-88c4-8547538a579e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.553169] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0270bd-e3ab-4155-baa4-08d3aea89c71 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.592542] env[69796]: DEBUG oslo_concurrency.lockutils [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Releasing lock "refresh_cache-6bbe1196-a61f-4260-bddd-64f578acf1dc" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.592792] env[69796]: DEBUG nova.compute.manager [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 839.593097] env[69796]: DEBUG nova.compute.manager [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] [instance: 6bbe1196-a61f-4260-bddd-64f578acf1dc] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 839.596888] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d049a3-90a6-4b88-b495-4063b904b05e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.606139] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595ab688-7970-4b86-83e0-fd35a5a5397f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.624075] env[69796]: DEBUG nova.compute.provider_tree [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 839.823106] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f21bbb11-4a1c-4dbc-976e-22fee6162d70 tempest-FloatingIPsAssociationTestJSON-5872790 tempest-FloatingIPsAssociationTestJSON-5872790-project-member] Lock "c5b49cf2-4316-43bf-90d3-8e6da14dc5d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.767s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.162544] env[69796]: ERROR nova.scheduler.client.report [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [req-1eb21342-77b4-42ff-aaa0-782d987b3981] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1eb21342-77b4-42ff-aaa0-782d987b3981"}]} [ 840.162927] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.722s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.163688] env[69796]: ERROR nova.compute.manager [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 840.163688] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Traceback (most recent call last): [ 840.163688] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 840.163688] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] yield [ 840.163688] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 840.163688] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] self.set_inventory_for_provider( [ 840.163688] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 840.163688] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 840.163922] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1eb21342-77b4-42ff-aaa0-782d987b3981"}]} [ 840.163922] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] [ 840.163922] env[69796]: ERROR nova.compute.manager [instance: 
836605ee-50cb-48b0-ba2e-33db3832f8ba] During handling of the above exception, another exception occurred: [ 840.163922] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] [ 840.163922] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Traceback (most recent call last): [ 840.163922] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 840.163922] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] self._delete_instance(context, instance, bdms) [ 840.163922] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 840.163922] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] self._complete_deletion(context, instance) [ 840.164189] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 840.164189] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] self._update_resource_tracker(context, instance) [ 840.164189] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 840.164189] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] self.rt.update_usage(context, instance, instance.node) [ 840.164189] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 840.164189] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] return f(*args, **kwargs) [ 840.164189] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 840.164189] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] self._update(context.elevated(), self.compute_nodes[nodename]) [ 840.164189] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 840.164189] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] self._update_to_placement(context, compute_node, startup) [ 840.164189] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 840.164189] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 840.164589] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 840.164589] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] return attempt.get(self._wrap_exception) [ 840.164589] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 840.164589] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] six.reraise(self.value[0], self.value[1], self.value[2]) [ 840.164589] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 840.164589] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] raise value [ 840.164589] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 840.164589] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 840.164589] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 840.164589] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] self.reportclient.update_from_provider_tree( [ 840.164589] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 840.164589] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] with catch_all(pd.uuid): [ 840.164589] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 840.164941] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] self.gen.throw(typ, value, traceback) [ 840.164941] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 840.164941] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] raise exception.ResourceProviderSyncFailed() [ 840.164941] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 840.164941] env[69796]: ERROR nova.compute.manager [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] [ 840.167626] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.976s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.169255] env[69796]: INFO nova.compute.claims [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 840.325809] env[69796]: DEBUG nova.compute.manager [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 840.649676] env[69796]: INFO nova.scheduler.client.report [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Deleted allocations for instance 6bbe1196-a61f-4260-bddd-64f578acf1dc [ 840.669305] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Lock "836605ee-50cb-48b0-ba2e-33db3832f8ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.787s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.853619] env[69796]: DEBUG oslo_concurrency.lockutils [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.165654] env[69796]: DEBUG oslo_concurrency.lockutils [None req-34721117-63e9-418d-a138-3f91dbdc4d6d tempest-ServersListShow2100Test-2140509799 tempest-ServersListShow2100Test-2140509799-project-member] Lock "6bbe1196-a61f-4260-bddd-64f578acf1dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.542s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.220278] env[69796]: DEBUG nova.scheduler.client.report [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 841.240387] env[69796]: DEBUG nova.scheduler.client.report [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from 
_refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 841.240387] env[69796]: DEBUG nova.compute.provider_tree [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 841.254317] env[69796]: DEBUG nova.scheduler.client.report [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 841.288055] env[69796]: DEBUG nova.scheduler.client.report [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 841.671120] env[69796]: DEBUG nova.compute.manager [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 842.002068] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb8f771-28a9-4ce8-bdc1-57d408e5a559 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.016920] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b051d72-4753-4292-9b43-0967b06b4e46 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.053377] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5c1ca5-9387-4abe-ad4b-5d46a3c3bc10 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.062801] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c139d6ca-5214-4eca-8d92-06bb74f6eaea {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.084037] env[69796]: DEBUG nova.compute.provider_tree [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 842.196650] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.207760] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.610368] env[69796]: ERROR nova.scheduler.client.report [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [req-9e2a4f0d-7b59-4266-ad1d-04946c4188e1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9e2a4f0d-7b59-4266-ad1d-04946c4188e1"}]} [ 842.610748] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.443s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.611785] env[69796]: ERROR nova.compute.manager [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 842.611785] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Traceback (most recent call last): [ 842.611785] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 842.611785] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] yield [ 842.611785] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 842.611785] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] self.set_inventory_for_provider( [ 842.611785] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 842.611785] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 842.612010] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9e2a4f0d-7b59-4266-ad1d-04946c4188e1"}]} [ 842.612010] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] [ 842.612010] env[69796]: ERROR nova.compute.manager 
[instance: efee7c1e-4d8c-450c-924b-9d7d15095740] During handling of the above exception, another exception occurred: [ 842.612010] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] [ 842.612010] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Traceback (most recent call last): [ 842.612010] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 842.612010] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] with self.rt.instance_claim(context, instance, node, allocs, [ 842.612010] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 842.612010] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] return f(*args, **kwargs) [ 842.612258] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 842.612258] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] self._update(elevated, cn) [ 842.612258] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 842.612258] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] self._update_to_placement(context, compute_node, startup) [ 842.612258] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 842.612258] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 842.612258] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 842.612258] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] return attempt.get(self._wrap_exception) [ 842.612258] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 842.612258] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] six.reraise(self.value[0], self.value[1], self.value[2]) [ 842.612258] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 842.612258] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] raise value [ 842.612258] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 842.612557] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 842.612557] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 842.612557] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] self.reportclient.update_from_provider_tree( [ 842.612557] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 842.612557] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] with catch_all(pd.uuid): [ 842.612557] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 842.612557] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] self.gen.throw(typ, value, traceback) [ 842.612557] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 842.612557] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] raise exception.ResourceProviderSyncFailed() [ 842.612557] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 842.612557] env[69796]: ERROR nova.compute.manager [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] [ 842.613079] env[69796]: DEBUG nova.compute.utils [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 842.614410] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.935s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.616974] env[69796]: INFO nova.compute.claims [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 842.622808] env[69796]: DEBUG nova.compute.manager [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Build of instance efee7c1e-4d8c-450c-924b-9d7d15095740 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 842.626143] env[69796]: DEBUG nova.compute.manager [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 842.626143] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Acquiring lock "refresh_cache-efee7c1e-4d8c-450c-924b-9d7d15095740" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.626143] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Acquired lock "refresh_cache-efee7c1e-4d8c-450c-924b-9d7d15095740" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.626143] env[69796]: DEBUG nova.network.neutron [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.169284] env[69796]: DEBUG nova.network.neutron [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.274705] env[69796]: DEBUG oslo_concurrency.lockutils [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Acquiring lock "0236eafc-d173-4ccf-ba01-5341c01fb5cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.275463] env[69796]: DEBUG oslo_concurrency.lockutils [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Lock "0236eafc-d173-4ccf-ba01-5341c01fb5cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.591517] env[69796]: DEBUG nova.network.neutron [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.671872] env[69796]: DEBUG nova.scheduler.client.report [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 843.694233] env[69796]: DEBUG nova.scheduler.client.report [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 843.694517] env[69796]: DEBUG nova.compute.provider_tree [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 843.708296] env[69796]: DEBUG nova.scheduler.client.report [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Refreshing 
aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 843.733861] env[69796]: DEBUG nova.scheduler.client.report [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 844.097527] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Releasing lock "refresh_cache-efee7c1e-4d8c-450c-924b-9d7d15095740" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.097840] env[69796]: DEBUG nova.compute.manager [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 844.098045] env[69796]: DEBUG nova.compute.manager [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 844.098223] env[69796]: DEBUG nova.network.neutron [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 844.142951] env[69796]: DEBUG nova.network.neutron [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.318318] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85dfc210-f8a0-4431-9643-63c2ba9fdeeb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.328187] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbdf51c-bdc1-4955-967d-aef1a7e8870c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.367407] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c85268-471b-4cf1-bb14-0d332589fcd4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.377995] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458afb1d-0ca2-4dbe-8085-cc550d414647 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.394152] env[69796]: DEBUG nova.compute.provider_tree [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 844.647117] env[69796]: DEBUG nova.network.neutron [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.920434] env[69796]: ERROR nova.scheduler.client.report [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [req-1d163db7-d1c5-40f1-8424-a7174e1e7e71] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1d163db7-d1c5-40f1-8424-a7174e1e7e71"}]} [ 844.920834] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.306s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.921492] env[69796]: ERROR nova.compute.manager [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 844.921492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Traceback (most recent call last): [ 844.921492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 844.921492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] yield [ 844.921492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 844.921492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] self.set_inventory_for_provider( [ 844.921492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 844.921492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 844.921809] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1d163db7-d1c5-40f1-8424-a7174e1e7e71"}]} [ 844.921809] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] [ 844.921809] env[69796]: ERROR nova.compute.manager 
[instance: 62354021-035a-48b1-b22c-bd12cadbdacd] During handling of the above exception, another exception occurred: [ 844.921809] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] [ 844.921809] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Traceback (most recent call last): [ 844.921809] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 844.921809] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] with self.rt.instance_claim(context, instance, node, allocs, [ 844.921809] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 844.921809] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] return f(*args, **kwargs) [ 844.922098] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 844.922098] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] self._update(elevated, cn) [ 844.922098] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 844.922098] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] self._update_to_placement(context, compute_node, startup) [ 844.922098] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 844.922098] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 844.922098] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 844.922098] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] return attempt.get(self._wrap_exception) [ 844.922098] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 844.922098] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] six.reraise(self.value[0], self.value[1], self.value[2]) [ 844.922098] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 844.922098] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] raise value [ 844.922098] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 844.922492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 844.922492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 844.922492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] self.reportclient.update_from_provider_tree( [ 844.922492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 844.922492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] with catch_all(pd.uuid): [ 844.922492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 844.922492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] self.gen.throw(typ, value, traceback) [ 844.922492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 844.922492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] raise exception.ResourceProviderSyncFailed() [ 844.922492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 844.922492] env[69796]: ERROR nova.compute.manager [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] [ 844.923376] env[69796]: DEBUG nova.compute.utils [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 844.928240] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.080s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.928240] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.928240] env[69796]: INFO nova.compute.manager [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] [instance: a4a16667-cd00-4850-9389-0bd57c7efd74] Successfully reverted task state from None on failure for instance. 
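Every traceback in this section follows the same two-step chain: set_inventory_for_provider raises ResourceProviderUpdateFailed carrying the 400 body, and the catch_all context manager in the report client re-raises it as the coarser ResourceProviderSyncFailed that the compute manager then surfaces (instance set to ERROR, build re-scheduled, or the RPC server error above). The following is a schematic sketch of that exception translation; the names mirror the log, but the bodies are illustrative assumptions rather than Nova's real implementation.

    # Schematic sketch of the exception translation visible in the tracebacks:
    # a narrow "update failed" error becomes a generic "sync failed" one.
    import contextlib

    class ResourceProviderUpdateFailed(Exception):
        pass

    class ResourceProviderSyncFailed(Exception):
        pass

    @contextlib.contextmanager
    def catch_all(provider_uuid):
        try:
            yield
        except ResourceProviderUpdateFailed:
            # The original error becomes __context__ of the new one, which is
            # what produces the "During handling of the above exception,
            # another exception occurred" blocks in the log.
            raise ResourceProviderSyncFailed()

    def update_from_provider_tree():
        with catch_all("dc1d576d-f9a3-4db7-b636-fdf2129d2ab3"):
            raise ResourceProviderUpdateFailed("placement returned 400")

    try:
        update_from_provider_tree()
    except ResourceProviderSyncFailed as exc:
        print(type(exc.__context__).__name__, "->", type(exc).__name__)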
[ 844.930548] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 20.681s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.932547] env[69796]: DEBUG nova.compute.manager [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Build of instance 62354021-035a-48b1-b22c-bd12cadbdacd was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 844.932988] env[69796]: DEBUG nova.compute.manager [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 844.933252] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Acquiring lock "refresh_cache-62354021-035a-48b1-b22c-bd12cadbdacd" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.933429] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Acquired lock "refresh_cache-62354021-035a-48b1-b22c-bd12cadbdacd" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.933591] env[69796]: DEBUG nova.network.neutron [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 844.941032] env[69796]: ERROR oslo_messaging.rpc.server [None req-a0c86707-c072-44b6-8545-89ab30bed3c7 tempest-ServersAdminNegativeTestJSON-280202454 tempest-ServersAdminNegativeTestJSON-280202454-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 844.941032] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 844.941032] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 844.941032] env[69796]: ERROR oslo_messaging.rpc.server yield [ 844.941032] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 844.941032] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 844.941032] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 844.941032] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 844.941032] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-c33f86e1-35c7-4889-a92a-18992172e433"}]} [ 844.941032] env[69796]: ERROR oslo_messaging.rpc.server [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 844.941441] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 844.942028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 
844.942597] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 844.942597] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 844.943162] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 844.943744] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 844.943744] env[69796]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 844.943744] env[69796]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 844.943744] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 844.943744] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 844.943744] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 844.943744] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 844.943744] env[69796]: ERROR oslo_messaging.rpc.server [ 845.151395] env[69796]: INFO nova.compute.manager [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: efee7c1e-4d8c-450c-924b-9d7d15095740] Took 1.05 seconds to deallocate network for instance. [ 845.466683] env[69796]: DEBUG nova.network.neutron [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 845.596354] env[69796]: DEBUG nova.network.neutron [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.100451] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Releasing lock "refresh_cache-62354021-035a-48b1-b22c-bd12cadbdacd" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.100451] env[69796]: DEBUG nova.compute.manager [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 846.100451] env[69796]: DEBUG nova.compute.manager [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 846.100451] env[69796]: DEBUG nova.network.neutron [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 846.121080] env[69796]: DEBUG nova.network.neutron [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 846.198373] env[69796]: INFO nova.scheduler.client.report [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Deleted allocations for instance efee7c1e-4d8c-450c-924b-9d7d15095740 [ 846.625348] env[69796]: DEBUG nova.network.neutron [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.710627] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b46a1e46-bde7-4a63-bb68-b9a3a0f06b78 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Lock "efee7c1e-4d8c-450c-924b-9d7d15095740" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.918s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.127616] env[69796]: INFO nova.compute.manager [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 62354021-035a-48b1-b22c-bd12cadbdacd] Took 1.03 seconds to deallocate network for instance. [ 847.219491] env[69796]: DEBUG nova.compute.manager [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 847.748707] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.171593] env[69796]: INFO nova.scheduler.client.report [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Deleted allocations for instance 62354021-035a-48b1-b22c-bd12cadbdacd [ 848.685711] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f6287be1-96ca-498c-96f9-1ede8b786d4e tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Lock "62354021-035a-48b1-b22c-bd12cadbdacd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.891s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.188268] env[69796]: DEBUG nova.compute.manager [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 849.729613] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.510448] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47f223c0-12b0-4eda-ab42-81fe8b95afac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 850.510703] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 38792225-b054-4c08-b3ec-51d46287b0f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 850.510740] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47005af8-11fe-498f-9b67-e0316faeeb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 850.510847] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 836605ee-50cb-48b0-ba2e-33db3832f8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 850.511022] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance a4a16667-cd00-4850-9389-0bd57c7efd74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 850.511078] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 7f37f6c9-adba-4292-9d47-c455f77e539f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 850.511196] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 8b103adc-9903-406f-8fd1-e193e00cde11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 850.511316] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 850.511701] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d746d66b-32df-4a4d-97bd-82b4ad364461 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 850.511701] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 3020e505-513b-4b29-996a-6e70a212f508 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 850.511701] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance f0d4f167-344a-4828-9f6e-8a62ed8e064d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 851.020409] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance f1f47f34-d16d-4eba-907f-08d707683941 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 851.156689] env[69796]: DEBUG oslo_concurrency.lockutils [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Acquiring lock "9c91466a-5057-4dbf-ad3b-c84120d8435a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.156689] env[69796]: DEBUG oslo_concurrency.lockutils [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Lock "9c91466a-5057-4dbf-ad3b-c84120d8435a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.523649] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance be94a635-f83f-46a2-957d-bc07e2e8abe6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 852.027791] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d2187050-87df-4167-b5e4-2a21a31145be has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 852.529646] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 9e84f3cf-fae6-474c-b86b-7cd67d986d46 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 853.032488] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance a754473f-2fb1-4018-9b61-9983bff07bd5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 853.536887] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance e7c0df98-424a-45c4-9bb6-1daf148dcb04 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 854.039904] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance f5fb74e8-1197-4314-8fa4-2d0a3d231ad4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 854.542861] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance f6bd68f4-3eb2-4203-bc00-2a5c7927cfac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 855.046929] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 568ebebb-730e-40c4-a1a3-d03d7d4e5a85 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 855.550928] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 3cebd244-f9e7-4360-8249-4e1720c4d557 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 856.055054] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 0bbd7678-014c-4f77-8608-277bce12410d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 856.498501] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquiring lock "af3f3f51-5368-43b7-b69b-44fe28004777" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.498731] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "af3f3f51-5368-43b7-b69b-44fe28004777" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.559033] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 82c2040c-9ada-4d77-88b1-453545c66b61 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 857.062842] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d9b2601d-1ebb-4609-90f3-180adb00c6bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 857.566177] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance bec0df68-c72b-4ecd-9a03-c8bf02f8059e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 858.070874] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 4de7ecb8-c591-430d-8e87-70749358f05d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 858.573975] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 6237eecf-7560-45c1-9fcd-6bd2a0747e7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 859.077718] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 859.580745] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d3002060-482c-4307-845e-5f00b085d06b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 860.084065] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 0236eafc-d173-4ccf-ba01-5341c01fb5cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 860.084359] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 860.084546] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 860.104519] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 860.121863] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 860.122135] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 860.134896] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 860.157821] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 860.591842] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdd94d6-7eaf-473c-a6ff-ad1373cd0e48 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.600875] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11a19ae-06b0-4039-9582-162adcec1d3e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.637018] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4ef9a5-fb1f-4604-b5d9-8a0109faef8b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.645262] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36d3261-c441-4162-9ff1-cb9af2e484b6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.659600] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 861.180253] env[69796]: ERROR nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [req-e0b84a09-10d8-4713-bc26-2eee903d3ae5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e0b84a09-10d8-4713-bc26-2eee903d3ae5"}]} [ 861.180978] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 16.250s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.181547] env[69796]: ERROR nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Error updating resources for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 861.181547] env[69796]: ERROR nova.compute.manager Traceback (most recent call last): [ 861.181547] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 861.181547] env[69796]: ERROR nova.compute.manager yield [ 861.181547] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 861.181547] env[69796]: ERROR nova.compute.manager self.set_inventory_for_provider( [ 861.181547] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 861.181547] env[69796]: ERROR nova.compute.manager raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 861.181547] env[69796]: ERROR nova.compute.manager nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e0b84a09-10d8-4713-bc26-2eee903d3ae5"}]} [ 861.181547] env[69796]: ERROR nova.compute.manager [ 861.181547] env[69796]: ERROR nova.compute.manager During handling of the above exception, another exception occurred: [ 861.181547] env[69796]: ERROR nova.compute.manager [ 861.181983] env[69796]: ERROR nova.compute.manager Traceback (most recent call last): [ 861.181983] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 11220, in _update_available_resource_for_node [ 861.181983] env[69796]: ERROR nova.compute.manager self.rt.update_available_resource(context, nodename, [ 861.181983] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 965, in update_available_resource [ 
861.181983] env[69796]: ERROR nova.compute.manager self._update_available_resource(context, resources, startup=startup) [ 861.181983] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 861.181983] env[69796]: ERROR nova.compute.manager return f(*args, **kwargs) [ 861.181983] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1096, in _update_available_resource [ 861.181983] env[69796]: ERROR nova.compute.manager self._update(context, cn, startup=startup) [ 861.181983] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 861.181983] env[69796]: ERROR nova.compute.manager self._update_to_placement(context, compute_node, startup) [ 861.181983] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 861.181983] env[69796]: ERROR nova.compute.manager return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 861.181983] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 861.181983] env[69796]: ERROR nova.compute.manager return attempt.get(self._wrap_exception) [ 861.181983] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 861.181983] env[69796]: ERROR nova.compute.manager six.reraise(self.value[0], self.value[1], self.value[2]) [ 861.181983] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 861.182475] env[69796]: ERROR nova.compute.manager raise value [ 861.182475] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 861.182475] env[69796]: ERROR nova.compute.manager attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 861.182475] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 861.182475] env[69796]: ERROR nova.compute.manager self.reportclient.update_from_provider_tree( [ 861.182475] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 861.182475] env[69796]: ERROR nova.compute.manager with catch_all(pd.uuid): [ 861.182475] env[69796]: ERROR nova.compute.manager File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 861.182475] env[69796]: ERROR nova.compute.manager self.gen.throw(typ, value, traceback) [ 861.182475] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 861.182475] env[69796]: ERROR nova.compute.manager raise exception.ResourceProviderSyncFailed() [ 861.182475] env[69796]: ERROR nova.compute.manager nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 861.182475] env[69796]: ERROR nova.compute.manager [ 861.182475] env[69796]: DEBUG oslo_concurrency.lockutils [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.369s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.183522] env[69796]: INFO nova.compute.claims [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: f1f47f34-d16d-4eba-907f-08d707683941] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.207343] env[69796]: DEBUG nova.scheduler.client.report [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 862.219872] env[69796]: DEBUG nova.scheduler.client.report [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 862.220112] env[69796]: DEBUG nova.compute.provider_tree [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 862.230021] env[69796]: DEBUG nova.scheduler.client.report [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 862.246606] env[69796]: DEBUG nova.scheduler.client.report [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: 
HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 862.615032] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41785351-e4fa-4676-a412-5df50da0affa {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.623274] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977d9f4a-001b-4a13-879e-275354195840 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.654793] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870c5064-3981-4b82-9794-d9bbe8a97939 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.663196] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0df060-cf79-479e-a0e0-df534fc68650 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.679676] env[69796]: DEBUG nova.compute.provider_tree [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 863.200917] env[69796]: ERROR nova.scheduler.client.report [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [req-e65ec3d4-b8e1-4e8e-8417-8ac7a2d5fe9a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e65ec3d4-b8e1-4e8e-8417-8ac7a2d5fe9a"}]} [ 863.201317] env[69796]: DEBUG oslo_concurrency.lockutils [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.019s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.202321] env[69796]: ERROR nova.compute.manager [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: f1f47f34-d16d-4eba-907f-08d707683941] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 863.202321] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] Traceback (most recent call last): [ 863.202321] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 863.202321] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] yield [ 863.202321] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 863.202321] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] self.set_inventory_for_provider( [ 863.202321] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 863.202321] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 863.202563] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e65ec3d4-b8e1-4e8e-8417-8ac7a2d5fe9a"}]} [ 863.202563] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] [ 863.202563] env[69796]: ERROR nova.compute.manager 
[instance: f1f47f34-d16d-4eba-907f-08d707683941] During handling of the above exception, another exception occurred: [ 863.202563] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] [ 863.202563] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] Traceback (most recent call last): [ 863.202563] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 863.202563] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] with self.rt.instance_claim(context, instance, node, allocs, [ 863.202563] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 863.202563] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] return f(*args, **kwargs) [ 863.202822] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 863.202822] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] self._update(elevated, cn) [ 863.202822] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 863.202822] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] self._update_to_placement(context, compute_node, startup) [ 863.202822] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 863.202822] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 863.202822] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 863.202822] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] return attempt.get(self._wrap_exception) [ 863.202822] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 863.202822] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] six.reraise(self.value[0], self.value[1], self.value[2]) [ 863.202822] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 863.202822] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] raise value [ 863.202822] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 863.203173] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 863.203173] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 863.203173] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] self.reportclient.update_from_provider_tree( [ 863.203173] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 863.203173] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] with catch_all(pd.uuid): [ 863.203173] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 863.203173] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] self.gen.throw(typ, value, traceback) [ 863.203173] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 863.203173] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] raise exception.ResourceProviderSyncFailed() [ 863.203173] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 863.203173] env[69796]: ERROR nova.compute.manager [instance: f1f47f34-d16d-4eba-907f-08d707683941] [ 863.203458] env[69796]: DEBUG nova.compute.utils [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: f1f47f34-d16d-4eba-907f-08d707683941] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 863.204277] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.309s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.205969] env[69796]: INFO nova.compute.claims [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 863.209059] env[69796]: DEBUG nova.compute.manager [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: f1f47f34-d16d-4eba-907f-08d707683941] Build of instance f1f47f34-d16d-4eba-907f-08d707683941 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 863.209422] env[69796]: DEBUG nova.compute.manager [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: f1f47f34-d16d-4eba-907f-08d707683941] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 863.209644] env[69796]: DEBUG oslo_concurrency.lockutils [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Acquiring lock "refresh_cache-f1f47f34-d16d-4eba-907f-08d707683941" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.209790] env[69796]: DEBUG oslo_concurrency.lockutils [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Acquired lock "refresh_cache-f1f47f34-d16d-4eba-907f-08d707683941" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.209949] env[69796]: DEBUG nova.network.neutron [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: f1f47f34-d16d-4eba-907f-08d707683941] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 863.730369] env[69796]: DEBUG nova.network.neutron [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: f1f47f34-d16d-4eba-907f-08d707683941] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 863.826930] env[69796]: DEBUG nova.network.neutron [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: f1f47f34-d16d-4eba-907f-08d707683941] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.235715] env[69796]: DEBUG nova.scheduler.client.report [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 864.251935] env[69796]: DEBUG nova.scheduler.client.report [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 864.252268] env[69796]: DEBUG nova.compute.provider_tree [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 864.264072] env[69796]: DEBUG nova.scheduler.client.report [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 864.282951] env[69796]: DEBUG nova.scheduler.client.report [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 864.331054] env[69796]: DEBUG oslo_concurrency.lockutils [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Releasing lock 
"refresh_cache-f1f47f34-d16d-4eba-907f-08d707683941" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.331342] env[69796]: DEBUG nova.compute.manager [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 864.331477] env[69796]: DEBUG nova.compute.manager [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: f1f47f34-d16d-4eba-907f-08d707683941] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 864.332483] env[69796]: DEBUG nova.network.neutron [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: f1f47f34-d16d-4eba-907f-08d707683941] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 864.348044] env[69796]: DEBUG nova.network.neutron [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: f1f47f34-d16d-4eba-907f-08d707683941] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 864.701774] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e245ae-f6ea-4783-b6d7-0175fe28d10d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.710497] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1060c077-aeb9-4467-9ca4-b404db7a246e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.746238] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4adeeca8-daee-462a-99f5-d746a67ed689 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.754673] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de947f4-809c-45de-9c51-cf5278c814de {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.769284] env[69796]: DEBUG nova.compute.provider_tree [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 864.851643] env[69796]: DEBUG 
nova.network.neutron [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: f1f47f34-d16d-4eba-907f-08d707683941] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.291746] env[69796]: ERROR nova.scheduler.client.report [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [req-6ae9d15e-8d5e-4b95-abe9-1f5b61fe62db] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-6ae9d15e-8d5e-4b95-abe9-1f5b61fe62db"}]} [ 865.292189] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.088s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.292740] env[69796]: ERROR nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
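The 400 above comes from Placement's JSON-Schema check on the inventory body: the compute node is reporting DISK_GB with max_unit 0, while the sub-schema quoted in the error detail requires an integer between 1 and 2147483647. A minimal sketch that reproduces the same check locally, assuming the jsonschema package is installed and copying the max_unit sub-schema straight from the error detail (an illustration only, not Placement's actual handler):

    # Reproduce the validation failure reported by Placement in the log above.
    # Assumption: the max_unit sub-schema is taken verbatim from the 400 detail;
    # this is not the real Placement request handler.
    import jsonschema

    MAX_UNIT_SCHEMA = {"type": "integer", "maximum": 2147483647, "minimum": 1}

    disk_gb_inventory = {
        "total": 400, "reserved": 0, "min_unit": 1, "max_unit": 0,
        "step_size": 1, "allocation_ratio": 1.0,
    }

    try:
        jsonschema.validate(disk_gb_inventory["max_unit"], MAX_UNIT_SCHEMA)
    except jsonschema.ValidationError as exc:
        # Prints "0 is less than the minimum of 1" -- the same message that turns
        # the PUT /resource_providers/<uuid>/inventories request into a 400.
        print(exc.message)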
[ 865.292740] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Traceback (most recent call last): [ 865.292740] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 865.292740] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] yield [ 865.292740] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 865.292740] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] self.set_inventory_for_provider( [ 865.292740] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 865.292740] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 865.293077] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-6ae9d15e-8d5e-4b95-abe9-1f5b61fe62db"}]} [ 865.293077] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] [ 865.293077] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] During handling of the above exception, another exception occurred: [ 865.293077] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] [ 865.293077] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Traceback (most recent call last): [ 865.293077] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 865.293077] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] with self.rt.instance_claim(context, instance, node, allocs, [ 865.293077] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 865.293077] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] return f(*args, **kwargs) [ 865.293479] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 865.293479] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] self._update(elevated, cn) [ 865.293479] env[69796]: ERROR nova.compute.manager [instance: 
be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 865.293479] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] self._update_to_placement(context, compute_node, startup) [ 865.293479] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 865.293479] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 865.293479] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 865.293479] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] return attempt.get(self._wrap_exception) [ 865.293479] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 865.293479] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] six.reraise(self.value[0], self.value[1], self.value[2]) [ 865.293479] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 865.293479] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] raise value [ 865.293479] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 865.294017] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 865.294017] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 865.294017] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] self.reportclient.update_from_provider_tree( [ 865.294017] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 865.294017] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] with catch_all(pd.uuid): [ 865.294017] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 865.294017] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] self.gen.throw(typ, value, traceback) [ 865.294017] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 865.294017] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] raise exception.ResourceProviderSyncFailed() [ 865.294017] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 865.294017] env[69796]: ERROR nova.compute.manager [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] [ 865.294405] env[69796]: DEBUG nova.compute.utils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 865.294745] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.074s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.296459] env[69796]: INFO nova.compute.claims [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 865.299393] env[69796]: DEBUG nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Build of instance be94a635-f83f-46a2-957d-bc07e2e8abe6 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 865.299807] env[69796]: DEBUG nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 865.300061] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquiring lock "refresh_cache-be94a635-f83f-46a2-957d-bc07e2e8abe6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.300292] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquired lock "refresh_cache-be94a635-f83f-46a2-957d-bc07e2e8abe6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.300542] env[69796]: DEBUG nova.network.neutron [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.353551] env[69796]: INFO nova.compute.manager [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] [instance: 
f1f47f34-d16d-4eba-907f-08d707683941] Took 1.02 seconds to deallocate network for instance. [ 865.828083] env[69796]: DEBUG nova.network.neutron [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.900608] env[69796]: DEBUG nova.network.neutron [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.326641] env[69796]: DEBUG nova.scheduler.client.report [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 866.339704] env[69796]: DEBUG nova.scheduler.client.report [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 866.339947] env[69796]: DEBUG nova.compute.provider_tree [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 866.350649] env[69796]: DEBUG nova.scheduler.client.report [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 866.368330] env[69796]: DEBUG nova.scheduler.client.report [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: 
HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 866.390148] env[69796]: INFO nova.scheduler.client.report [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Deleted allocations for instance f1f47f34-d16d-4eba-907f-08d707683941 [ 866.403839] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Releasing lock "refresh_cache-be94a635-f83f-46a2-957d-bc07e2e8abe6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.404193] env[69796]: DEBUG nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 866.404402] env[69796]: DEBUG nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 866.404607] env[69796]: DEBUG nova.network.neutron [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 866.423531] env[69796]: DEBUG nova.network.neutron [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.746178] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc775020-d40f-407f-b854-fe9ccaef2614 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.758121] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3f8993-04c3-4ee7-8074-ac8e275df578 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.804683] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9a2364-a474-4dde-9aa6-a77300e88511 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.813048] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0851dbd-4797-403a-bfdc-c0d055704378 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.827411] env[69796]: DEBUG nova.compute.provider_tree [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 866.901757] env[69796]: DEBUG oslo_concurrency.lockutils [None req-289f2e92-9348-44ec-a5c4-8573dd1376f7 tempest-ListServerFiltersTestJSON-1547866523 tempest-ListServerFiltersTestJSON-1547866523-project-member] Lock "f1f47f34-d16d-4eba-907f-08d707683941" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.086s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.927012] env[69796]: DEBUG nova.network.neutron [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.353754] env[69796]: ERROR nova.scheduler.client.report [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [req-84c3e74e-bf49-4bfc-854d-d8ffa211b781] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-84c3e74e-bf49-4bfc-854d-d8ffa211b781"}]} [ 867.354161] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.059s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.354849] env[69796]: ERROR nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 867.354849] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] Traceback (most recent call last): [ 867.354849] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 867.354849] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] yield [ 867.354849] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 867.354849] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] self.set_inventory_for_provider( [ 867.354849] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 867.354849] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 867.355196] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-84c3e74e-bf49-4bfc-854d-d8ffa211b781"}]} [ 867.355196] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] [ 867.355196] env[69796]: ERROR nova.compute.manager [instance: 
d2187050-87df-4167-b5e4-2a21a31145be] During handling of the above exception, another exception occurred: [ 867.355196] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] [ 867.355196] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] Traceback (most recent call last): [ 867.355196] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 867.355196] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] with self.rt.instance_claim(context, instance, node, allocs, [ 867.355196] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 867.355196] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] return f(*args, **kwargs) [ 867.355656] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 867.355656] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] self._update(elevated, cn) [ 867.355656] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 867.355656] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] self._update_to_placement(context, compute_node, startup) [ 867.355656] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 867.355656] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 867.355656] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 867.355656] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] return attempt.get(self._wrap_exception) [ 867.355656] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 867.355656] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] six.reraise(self.value[0], self.value[1], self.value[2]) [ 867.355656] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 867.355656] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] raise value [ 867.355656] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 867.356354] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 867.356354] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 867.356354] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] self.reportclient.update_from_provider_tree( [ 867.356354] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 867.356354] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] with catch_all(pd.uuid): [ 867.356354] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 867.356354] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] self.gen.throw(typ, value, traceback) [ 867.356354] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 867.356354] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] raise exception.ResourceProviderSyncFailed() [ 867.356354] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 867.356354] env[69796]: ERROR nova.compute.manager [instance: d2187050-87df-4167-b5e4-2a21a31145be] [ 867.356992] env[69796]: DEBUG nova.compute.utils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 867.357843] env[69796]: DEBUG oslo_concurrency.lockutils [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.858s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.359422] env[69796]: INFO nova.compute.claims [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 867.365950] env[69796]: DEBUG nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] Build of instance d2187050-87df-4167-b5e4-2a21a31145be was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 867.366392] env[69796]: DEBUG nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 867.366641] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquiring lock "refresh_cache-d2187050-87df-4167-b5e4-2a21a31145be" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.366801] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquired lock "refresh_cache-d2187050-87df-4167-b5e4-2a21a31145be" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.366960] env[69796]: DEBUG nova.network.neutron [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 867.403812] env[69796]: DEBUG nova.compute.manager [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 867.429205] env[69796]: INFO nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: be94a635-f83f-46a2-957d-bc07e2e8abe6] Took 1.02 seconds to deallocate network for instance. [ 867.900537] env[69796]: DEBUG nova.network.neutron [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 867.925135] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.022277] env[69796]: DEBUG nova.network.neutron [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.395317] env[69796]: DEBUG nova.scheduler.client.report [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 868.410210] env[69796]: DEBUG nova.scheduler.client.report [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 868.410432] env[69796]: DEBUG nova.compute.provider_tree [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 868.422014] env[69796]: DEBUG nova.scheduler.client.report [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 868.441048] env[69796]: DEBUG nova.scheduler.client.report [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: 
HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 868.465278] env[69796]: INFO nova.scheduler.client.report [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Deleted allocations for instance be94a635-f83f-46a2-957d-bc07e2e8abe6 [ 868.525033] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Releasing lock "refresh_cache-d2187050-87df-4167-b5e4-2a21a31145be" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.525288] env[69796]: DEBUG nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 868.525475] env[69796]: DEBUG nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 868.525673] env[69796]: DEBUG nova.network.neutron [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 868.544174] env[69796]: DEBUG nova.network.neutron [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.808700] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dedf324b-5455-4d55-a2b8-e0f6a6265b7e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.817455] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f12826-926e-4705-aabb-db422c0dbf9a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.849704] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775a6a49-2e91-4101-920b-fbf176a81ec3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.857798] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-540abbc8-db66-4b0c-bd54-36fff961c5d3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.873033] env[69796]: DEBUG nova.compute.provider_tree [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 868.974949] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "be94a635-f83f-46a2-957d-bc07e2e8abe6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.180s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.046933] env[69796]: DEBUG nova.network.neutron [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.398032] env[69796]: ERROR nova.scheduler.client.report [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [req-dc7f4f2e-5b77-483e-8179-7779ae730376] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-dc7f4f2e-5b77-483e-8179-7779ae730376"}]} [ 869.398032] env[69796]: DEBUG oslo_concurrency.lockutils [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.040s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.398396] env[69796]: ERROR nova.compute.manager [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 869.398396] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Traceback (most recent call last): [ 869.398396] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 869.398396] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] yield [ 869.398396] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 869.398396] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] self.set_inventory_for_provider( [ 869.398396] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 869.398396] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 869.398870] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-dc7f4f2e-5b77-483e-8179-7779ae730376"}]} [ 869.398870] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] [ 869.398870] env[69796]: ERROR 
nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] During handling of the above exception, another exception occurred: [ 869.398870] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] [ 869.398870] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Traceback (most recent call last): [ 869.398870] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 869.398870] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] with self.rt.instance_claim(context, instance, node, allocs, [ 869.398870] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 869.398870] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] return f(*args, **kwargs) [ 869.399151] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 869.399151] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] self._update(elevated, cn) [ 869.399151] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 869.399151] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] self._update_to_placement(context, compute_node, startup) [ 869.399151] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 869.399151] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 869.399151] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 869.399151] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] return attempt.get(self._wrap_exception) [ 869.399151] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 869.399151] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] six.reraise(self.value[0], self.value[1], self.value[2]) [ 869.399151] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 869.399151] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] raise value [ 869.399151] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 869.399494] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 869.399494] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 869.399494] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] self.reportclient.update_from_provider_tree( [ 869.399494] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 869.399494] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] with catch_all(pd.uuid): [ 869.399494] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 869.399494] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] self.gen.throw(typ, value, traceback) [ 869.399494] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 869.399494] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] raise exception.ResourceProviderSyncFailed() [ 869.399494] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 869.399494] env[69796]: ERROR nova.compute.manager [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] [ 869.399796] env[69796]: DEBUG nova.compute.utils [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 869.400140] env[69796]: DEBUG oslo_concurrency.lockutils [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.312s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.401669] env[69796]: INFO nova.compute.claims [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 869.408135] env[69796]: DEBUG nova.compute.manager [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Build of instance 9e84f3cf-fae6-474c-b86b-7cd67d986d46 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 869.408572] env[69796]: DEBUG nova.compute.manager [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 869.408802] env[69796]: DEBUG oslo_concurrency.lockutils [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Acquiring lock "refresh_cache-9e84f3cf-fae6-474c-b86b-7cd67d986d46" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.408951] env[69796]: DEBUG oslo_concurrency.lockutils [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Acquired lock "refresh_cache-9e84f3cf-fae6-474c-b86b-7cd67d986d46" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.409127] env[69796]: DEBUG nova.network.neutron [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 869.476962] env[69796]: DEBUG nova.compute.manager [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 869.550228] env[69796]: INFO nova.compute.manager [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: d2187050-87df-4167-b5e4-2a21a31145be] Took 1.02 seconds to deallocate network for instance. [ 869.934459] env[69796]: DEBUG nova.network.neutron [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.005597] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.058908] env[69796]: DEBUG nova.network.neutron [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.432182] env[69796]: DEBUG nova.scheduler.client.report [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 870.448008] env[69796]: DEBUG nova.scheduler.client.report [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 870.448240] env[69796]: DEBUG nova.compute.provider_tree [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 870.459972] env[69796]: DEBUG nova.scheduler.client.report [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 870.478111] env[69796]: DEBUG nova.scheduler.client.report [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: 
HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 870.563181] env[69796]: DEBUG oslo_concurrency.lockutils [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Releasing lock "refresh_cache-9e84f3cf-fae6-474c-b86b-7cd67d986d46" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.563383] env[69796]: DEBUG nova.compute.manager [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 870.563595] env[69796]: DEBUG nova.compute.manager [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 870.563771] env[69796]: DEBUG nova.network.neutron [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 870.583709] env[69796]: DEBUG nova.network.neutron [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.592029] env[69796]: INFO nova.scheduler.client.report [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Deleted allocations for instance d2187050-87df-4167-b5e4-2a21a31145be [ 870.895625] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0281cb0c-bf4c-40fc-9a20-53b8e4032393 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.904512] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a69278-25e2-4d7e-aeb3-1491711bc046 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.938877] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9dd8bfa-5593-45f2-9469-6a7e3f1478b3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.947542] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051671a3-7bed-47da-9510-f3f42e16eaf2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.961581] env[69796]: DEBUG nova.compute.provider_tree [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 871.086647] env[69796]: DEBUG nova.network.neutron [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.102605] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f97b63d2-2cd9-4b33-98f6-729492b1954d tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "d2187050-87df-4167-b5e4-2a21a31145be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.261s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.487328] env[69796]: ERROR nova.scheduler.client.report [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [req-4579ffcc-e134-46e1-99cf-7829b4867dd4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4579ffcc-e134-46e1-99cf-7829b4867dd4"}]} [ 871.487713] env[69796]: DEBUG oslo_concurrency.lockutils [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.088s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.488339] env[69796]: ERROR nova.compute.manager [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 871.488339] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Traceback (most recent call last): [ 871.488339] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 871.488339] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] yield [ 871.488339] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 871.488339] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] self.set_inventory_for_provider( [ 871.488339] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 871.488339] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 871.488565] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", 
"request_id": "req-4579ffcc-e134-46e1-99cf-7829b4867dd4"}]} [ 871.488565] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] [ 871.488565] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] During handling of the above exception, another exception occurred: [ 871.488565] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] [ 871.488565] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Traceback (most recent call last): [ 871.488565] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 871.488565] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] with self.rt.instance_claim(context, instance, node, allocs, [ 871.488565] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 871.488565] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] return f(*args, **kwargs) [ 871.488809] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 871.488809] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] self._update(elevated, cn) [ 871.488809] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 871.488809] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] self._update_to_placement(context, compute_node, startup) [ 871.488809] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 871.488809] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 871.488809] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 871.488809] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] return attempt.get(self._wrap_exception) [ 871.488809] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 871.488809] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] six.reraise(self.value[0], self.value[1], self.value[2]) [ 871.488809] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 871.488809] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] raise value [ 871.488809] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 871.489151] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] attempt = 
Attempt(fn(*args, **kwargs), attempt_number, False) [ 871.489151] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 871.489151] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] self.reportclient.update_from_provider_tree( [ 871.489151] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 871.489151] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] with catch_all(pd.uuid): [ 871.489151] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 871.489151] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] self.gen.throw(typ, value, traceback) [ 871.489151] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 871.489151] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] raise exception.ResourceProviderSyncFailed() [ 871.489151] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 871.489151] env[69796]: ERROR nova.compute.manager [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] [ 871.489454] env[69796]: DEBUG nova.compute.utils [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 871.491375] env[69796]: DEBUG oslo_concurrency.lockutils [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.841s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.492997] env[69796]: INFO nova.compute.claims [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 871.495885] env[69796]: DEBUG nova.compute.manager [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Build of instance a754473f-2fb1-4018-9b61-9983bff07bd5 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 871.496315] env[69796]: DEBUG nova.compute.manager [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 871.496558] env[69796]: DEBUG oslo_concurrency.lockutils [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Acquiring lock "refresh_cache-a754473f-2fb1-4018-9b61-9983bff07bd5" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.496723] env[69796]: DEBUG oslo_concurrency.lockutils [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Acquired lock "refresh_cache-a754473f-2fb1-4018-9b61-9983bff07bd5" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.496879] env[69796]: DEBUG nova.network.neutron [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 871.589495] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Acquiring lock "6b1e871c-0971-4c37-a852-14fea283c815" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.589495] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Lock "6b1e871c-0971-4c37-a852-14fea283c815" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.593092] env[69796]: INFO nova.compute.manager [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] [instance: 9e84f3cf-fae6-474c-b86b-7cd67d986d46] Took 1.03 seconds to deallocate network for instance. [ 871.605341] env[69796]: DEBUG nova.compute.manager [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 871.643162] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Acquiring lock "fd090570-97f0-4afc-a512-eb4be373c51c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.643646] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Lock "fd090570-97f0-4afc-a512-eb4be373c51c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.675744] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Acquiring lock "da71a468-a227-493a-b4d3-d92b7626b18c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.675995] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Lock "da71a468-a227-493a-b4d3-d92b7626b18c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.022182] env[69796]: DEBUG nova.network.neutron [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 872.112760] env[69796]: DEBUG nova.network.neutron [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.130856] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.522583] env[69796]: DEBUG nova.scheduler.client.report [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 872.537284] env[69796]: DEBUG nova.scheduler.client.report [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 872.537622] env[69796]: DEBUG nova.compute.provider_tree [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 872.550152] env[69796]: DEBUG nova.scheduler.client.report [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 872.569408] env[69796]: DEBUG nova.scheduler.client.report [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, 
traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 872.605875] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquiring lock "6777b20d-7cfe-44a0-aaff-c58318ad88e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.606129] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "6777b20d-7cfe-44a0-aaff-c58318ad88e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.618088] env[69796]: DEBUG oslo_concurrency.lockutils [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Releasing lock "refresh_cache-a754473f-2fb1-4018-9b61-9983bff07bd5" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.618471] env[69796]: DEBUG nova.compute.manager [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 872.618471] env[69796]: DEBUG nova.compute.manager [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 872.618651] env[69796]: DEBUG nova.network.neutron [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 872.624752] env[69796]: INFO nova.scheduler.client.report [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Deleted allocations for instance 9e84f3cf-fae6-474c-b86b-7cd67d986d46 [ 872.635175] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquiring lock "951b3b77-765e-41c8-866e-b0bb4bd45559" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.635449] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "951b3b77-765e-41c8-866e-b0bb4bd45559" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.648571] env[69796]: DEBUG nova.network.neutron [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 873.004882] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c0f812-5a91-4aec-9ade-2cfede842ff9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.013338] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f21f2a-8f0a-4f9d-8e96-2f278a23e9d6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.045580] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca7e4c1-67cb-4eed-979f-59148eaf517c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.053850] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1052f034-a17f-4e7a-b585-53c5c031f680 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.068208] env[69796]: DEBUG nova.compute.provider_tree [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 873.139443] env[69796]: DEBUG oslo_concurrency.lockutils [None req-85204a66-185f-4cd1-b9fa-2e615a6521a1 tempest-InstanceActionsV221TestJSON-437963817 tempest-InstanceActionsV221TestJSON-437963817-project-member] Lock "9e84f3cf-fae6-474c-b86b-7cd67d986d46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.354s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.151345] env[69796]: DEBUG nova.network.neutron [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.592622] env[69796]: ERROR nova.scheduler.client.report [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [req-7c7b2a34-e9d1-4581-a36e-da6bdfef3c11] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7c7b2a34-e9d1-4581-a36e-da6bdfef3c11"}]} [ 873.592989] env[69796]: DEBUG oslo_concurrency.lockutils [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.102s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.593618] env[69796]: ERROR nova.compute.manager [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 873.593618] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Traceback (most recent call last): [ 873.593618] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 873.593618] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] yield [ 873.593618] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 873.593618] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] self.set_inventory_for_provider( [ 873.593618] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 873.593618] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 873.593926] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7c7b2a34-e9d1-4581-a36e-da6bdfef3c11"}]} [ 873.593926] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] [ 873.593926] env[69796]: ERROR 
nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] During handling of the above exception, another exception occurred: [ 873.593926] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] [ 873.593926] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Traceback (most recent call last): [ 873.593926] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 873.593926] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] with self.rt.instance_claim(context, instance, node, allocs, [ 873.593926] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 873.593926] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] return f(*args, **kwargs) [ 873.594295] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 873.594295] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] self._update(elevated, cn) [ 873.594295] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 873.594295] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] self._update_to_placement(context, compute_node, startup) [ 873.594295] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 873.594295] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 873.594295] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 873.594295] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] return attempt.get(self._wrap_exception) [ 873.594295] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 873.594295] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] six.reraise(self.value[0], self.value[1], self.value[2]) [ 873.594295] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 873.594295] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] raise value [ 873.594295] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 873.594681] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 873.594681] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 873.594681] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] self.reportclient.update_from_provider_tree( [ 873.594681] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 873.594681] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] with catch_all(pd.uuid): [ 873.594681] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 873.594681] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] self.gen.throw(typ, value, traceback) [ 873.594681] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 873.594681] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] raise exception.ResourceProviderSyncFailed() [ 873.594681] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 873.594681] env[69796]: ERROR nova.compute.manager [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] [ 873.595092] env[69796]: DEBUG nova.compute.utils [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 873.596040] env[69796]: DEBUG oslo_concurrency.lockutils [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.743s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.598054] env[69796]: INFO nova.compute.claims [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 873.601237] env[69796]: DEBUG nova.compute.manager [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Build of instance e7c0df98-424a-45c4-9bb6-1daf148dcb04 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 873.601671] env[69796]: DEBUG nova.compute.manager [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 873.605019] env[69796]: DEBUG oslo_concurrency.lockutils [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Acquiring lock "refresh_cache-e7c0df98-424a-45c4-9bb6-1daf148dcb04" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.605019] env[69796]: DEBUG oslo_concurrency.lockutils [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Acquired lock "refresh_cache-e7c0df98-424a-45c4-9bb6-1daf148dcb04" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.605019] env[69796]: DEBUG nova.network.neutron [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 873.642418] env[69796]: DEBUG nova.compute.manager [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 873.655431] env[69796]: INFO nova.compute.manager [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] [instance: a754473f-2fb1-4018-9b61-9983bff07bd5] Took 1.04 seconds to deallocate network for instance. [ 874.127978] env[69796]: DEBUG nova.network.neutron [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 874.175161] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.212149] env[69796]: DEBUG nova.network.neutron [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.631964] env[69796]: DEBUG nova.scheduler.client.report [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 874.647514] env[69796]: DEBUG nova.scheduler.client.report [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 874.648042] env[69796]: DEBUG nova.compute.provider_tree [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 874.661940] env[69796]: DEBUG nova.scheduler.client.report [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 874.685872] env[69796]: INFO nova.scheduler.client.report [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Deleted allocations for instance a754473f-2fb1-4018-9b61-9983bff07bd5 [ 874.694089] env[69796]: DEBUG nova.scheduler.client.report [None 
req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 874.716084] env[69796]: DEBUG oslo_concurrency.lockutils [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Releasing lock "refresh_cache-e7c0df98-424a-45c4-9bb6-1daf148dcb04" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.716442] env[69796]: DEBUG nova.compute.manager [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 874.716536] env[69796]: DEBUG nova.compute.manager [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 874.716706] env[69796]: DEBUG nova.network.neutron [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 874.740714] env[69796]: DEBUG nova.network.neutron [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 875.131695] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8264afe9-d83d-410f-8127-d0532c552edc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.140582] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e764a3-c744-4c4e-9650-f092c36219d4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.177684] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ee626d-7d75-460b-9155-ec48eaa1e5f5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.186325] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3582e1ac-b0d6-4314-b795-9f7f29b9ca01 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.200711] env[69796]: DEBUG nova.compute.provider_tree [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 875.203850] env[69796]: DEBUG oslo_concurrency.lockutils [None req-14ae0915-5920-4ba5-b816-ce9903c7963e tempest-ServerActionsTestOtherB-883203743 tempest-ServerActionsTestOtherB-883203743-project-member] Lock "a754473f-2fb1-4018-9b61-9983bff07bd5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.547s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.245342] env[69796]: DEBUG nova.network.neutron [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.707298] env[69796]: DEBUG nova.compute.manager [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 875.733092] env[69796]: ERROR nova.scheduler.client.report [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [req-f26d565f-2ed2-47f4-996e-eacbdb931122] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f26d565f-2ed2-47f4-996e-eacbdb931122"}]} [ 875.733487] env[69796]: DEBUG oslo_concurrency.lockutils [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.137s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.734102] env[69796]: ERROR nova.compute.manager [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 875.734102] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Traceback (most recent call last): [ 875.734102] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 875.734102] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] yield [ 875.734102] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 875.734102] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] self.set_inventory_for_provider( [ 875.734102] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 875.734102] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 875.734363] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f26d565f-2ed2-47f4-996e-eacbdb931122"}]} [ 875.734363] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] [ 875.734363] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] During handling of the above exception, another exception occurred: [ 875.734363] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] [ 875.734363] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Traceback (most recent call last): [ 875.734363] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 875.734363] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] with self.rt.instance_claim(context, instance, node, allocs, [ 875.734363] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 875.734363] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] return f(*args, **kwargs) [ 875.734616] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 875.734616] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] self._update(elevated, cn) [ 875.734616] env[69796]: ERROR nova.compute.manager [instance: 
f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 875.734616] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] self._update_to_placement(context, compute_node, startup) [ 875.734616] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 875.734616] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 875.734616] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 875.734616] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] return attempt.get(self._wrap_exception) [ 875.734616] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 875.734616] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] six.reraise(self.value[0], self.value[1], self.value[2]) [ 875.734616] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 875.734616] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] raise value [ 875.734616] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 875.734942] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 875.734942] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 875.734942] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] self.reportclient.update_from_provider_tree( [ 875.734942] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 875.734942] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] with catch_all(pd.uuid): [ 875.734942] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 875.734942] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] self.gen.throw(typ, value, traceback) [ 875.734942] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 875.734942] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] raise exception.ResourceProviderSyncFailed() [ 875.734942] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 875.734942] env[69796]: ERROR nova.compute.manager [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] [ 875.735229] env[69796]: DEBUG nova.compute.utils [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 875.736426] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.540s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.746021] env[69796]: DEBUG nova.compute.manager [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Build of instance f5fb74e8-1197-4314-8fa4-2d0a3d231ad4 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 875.746021] env[69796]: DEBUG nova.compute.manager [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 875.746021] env[69796]: DEBUG oslo_concurrency.lockutils [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Acquiring lock "refresh_cache-f5fb74e8-1197-4314-8fa4-2d0a3d231ad4" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.746021] env[69796]: DEBUG oslo_concurrency.lockutils [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Acquired lock "refresh_cache-f5fb74e8-1197-4314-8fa4-2d0a3d231ad4" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.746311] env[69796]: DEBUG nova.network.neutron [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.749635] env[69796]: INFO nova.compute.manager [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] [instance: e7c0df98-424a-45c4-9bb6-1daf148dcb04] Took 1.03 seconds to deallocate network for instance. 
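Editor's note: every 400 in this stretch of the log has the same cause: the compute node reports a DISK_GB inventory whose max_unit is 0, and placement's inventory schema requires max_unit >= 1. The following is a minimal sketch that reproduces the validation message using only the schema fragment and the inventory values quoted verbatim in the error detail above; the use of the jsonschema package and the trimmed-down schema are illustrative assumptions, not placement's actual implementation.

    import jsonschema

    # Schema fragment reconstructed from the 400 response in the log above
    # (the part of placement's PUT /resource_providers/{uuid}/inventories
    # schema that the request fails against).
    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {
                                "type": "integer",
                                "maximum": 2147483647,
                                "minimum": 1,
                            },
                        },
                    },
                },
            },
        },
    }

    # The DISK_GB inventory exactly as the compute host reports it in the
    # log: max_unit is 0, which is what the schema rejects.
    payload = {
        "inventories": {
            "DISK_GB": {
                "total": 400,
                "reserved": 0,
                "min_unit": 1,
                "max_unit": 0,   # violates "minimum": 1
                "step_size": 1,
                "allocation_ratio": 1.0,
            },
        },
    }

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.ValidationError as exc:
        # Prints "0 is less than the minimum of 1", matching the 400 detail.
        print(exc.message)

Note the refresh entries that follow: placement still holds DISK_GB max_unit of 1 from the last successful sync, so the provider tree is refreshed with max_unit 1, then the next push of max_unit 0 fails again, which is why the same 400 repeats for each claim.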
[ 875.759925] env[69796]: DEBUG nova.scheduler.client.report [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 875.786170] env[69796]: DEBUG nova.scheduler.client.report [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 875.786399] env[69796]: DEBUG nova.compute.provider_tree [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 875.801315] env[69796]: DEBUG nova.scheduler.client.report [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 875.829115] env[69796]: DEBUG nova.scheduler.client.report [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 876.231056] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.292710] env[69796]: DEBUG nova.network.neutron [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 876.302041] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e5536f-d867-400f-8599-14d9ec41a5b0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.313770] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e15444-29df-496a-92ce-931edc5445fb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.351619] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89931d56-904e-4382-8914-ac87e8e8f720 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.359480] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec53b96-85b2-4204-9ff3-e959735acb94 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.374640] env[69796]: DEBUG nova.compute.provider_tree [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 876.404711] env[69796]: DEBUG nova.network.neutron [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.909212] env[69796]: DEBUG oslo_concurrency.lockutils [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Releasing lock "refresh_cache-f5fb74e8-1197-4314-8fa4-2d0a3d231ad4" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.909617] env[69796]: DEBUG nova.compute.manager [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 876.909617] env[69796]: DEBUG nova.compute.manager [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 876.910122] env[69796]: DEBUG nova.network.neutron [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 876.938370] env[69796]: INFO nova.scheduler.client.report [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Deleted allocations for instance e7c0df98-424a-45c4-9bb6-1daf148dcb04 [ 876.949590] env[69796]: DEBUG nova.network.neutron [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 876.953572] env[69796]: ERROR nova.scheduler.client.report [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [req-e23fe54b-ec5a-4b9a-8c64-5fa58c3a2656] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e23fe54b-ec5a-4b9a-8c64-5fa58c3a2656"}]}: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
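Editor's note: the tracebacks show the error path inside the report client: the 400 from PUT /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories is raised as ResourceProviderUpdateFailed in set_inventory_for_provider, and the catch_all context manager in scheduler/client/report.py converts it into the generic ResourceProviderSyncFailed that instance_claim and update_usage callers see (and that the RPC server ultimately logs). A minimal sketch of that conversion, assuming simplified stand-in exception classes rather than Nova's real ones:

    import contextlib

    class ResourceProviderUpdateFailed(Exception):
        """Stand-in for nova.exception.ResourceProviderUpdateFailed."""

    class ResourceProviderSyncFailed(Exception):
        """Stand-in for nova.exception.ResourceProviderSyncFailed."""

    @contextlib.contextmanager
    def catch_all(rp_uuid):
        # Mirrors the pattern visible in the traceback: the specific update
        # failure (carrying the 400 body) is swallowed and re-raised as the
        # generic sync failure.
        try:
            yield
        except ResourceProviderUpdateFailed:
            raise ResourceProviderSyncFailed()

    try:
        with catch_all("dc1d576d-f9a3-4db7-b636-fdf2129d2ab3"):
            raise ResourceProviderUpdateFailed(
                "400: DISK_GB max_unit 0 is less than the minimum of 1")
    except ResourceProviderSyncFailed as exc:
        # Callers such as instance_claim() only ever see this generic error,
        # which is why every build in the log fails with the same message.
        print(type(exc).__name__)

Because _update_to_placement is also wrapped by the retrying decorator (see the retrying.py frames above), the same 400 is retried before the sync failure finally propagates to the compute manager.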
[ 876.953572] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.216s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.953757] env[69796]: WARNING nova.compute.manager [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] [instance: 836605ee-50cb-48b0-ba2e-33db3832f8ba] Failed to revert task state for instance. Error: Failed to synchronize the placement service with resource provider information supplied by the compute host.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 876.956431] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.749s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.967478] env[69796]: INFO nova.compute.claims [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 876.967478] env[69796]: ERROR oslo_messaging.rpc.server [None req-b06e3c02-7d6e-404f-a908-9658c5890eee tempest-ServersTestJSON-542552314 tempest-ServersTestJSON-542552314-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 876.967478] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 876.967478] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 876.967478] env[69796]: ERROR oslo_messaging.rpc.server yield [ 876.967478] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 876.967478] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 876.967478] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 876.967478] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 876.967955] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1eb21342-77b4-42ff-aaa0-782d987b3981"}]} [ 876.967955] env[69796]: ERROR oslo_messaging.rpc.server [ 876.967955] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 876.967955] env[69796]: ERROR oslo_messaging.rpc.server [ 876.967955] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 876.967955] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 876.967955] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 876.967955] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 876.967955] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 876.967955] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 876.967955] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 876.967955] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 876.967955] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 876.968396] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 
876.968835] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 876.968835] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 876.969293] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 876.969746] env[69796]: ERROR oslo_messaging.rpc.server [ 877.463963] env[69796]: DEBUG nova.network.neutron [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.468301] env[69796]: DEBUG oslo_concurrency.lockutils [None req-09d46a23-f10c-431c-8462-ab2f3b7408ef tempest-ServersV294TestFqdnHostnames-581560284 tempest-ServersV294TestFqdnHostnames-581560284-project-member] Lock "e7c0df98-424a-45c4-9bb6-1daf148dcb04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.624s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.969273] env[69796]: INFO nova.compute.manager [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] [instance: f5fb74e8-1197-4314-8fa4-2d0a3d231ad4] Took 1.06 seconds to deallocate network for instance. [ 877.976299] env[69796]: DEBUG nova.compute.manager [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 878.010721] env[69796]: DEBUG nova.scheduler.client.report [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 878.030358] env[69796]: DEBUG nova.scheduler.client.report [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 878.030579] env[69796]: DEBUG nova.compute.provider_tree [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 878.048163] env[69796]: DEBUG nova.scheduler.client.report [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 878.069502] env[69796]: DEBUG nova.scheduler.client.report [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 878.514565] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.529394] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7b6b9254-076d-40e6-bdf9-2c52b7b2d67e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.538791] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1368e771-ddfc-4cd6-9e01-0b28e633f0b1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.576330] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5636f85d-eee8-4afb-b2c1-44a94f7e9e01 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.584962] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd9c0b8-db19-40ed-98de-6fc5a971c179 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.600532] env[69796]: DEBUG nova.compute.provider_tree [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 879.007334] env[69796]: INFO nova.scheduler.client.report [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Deleted allocations for instance f5fb74e8-1197-4314-8fa4-2d0a3d231ad4 [ 879.129846] env[69796]: ERROR nova.scheduler.client.report [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [req-187a4d76-7c9a-4c00-b490-65087048532a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-187a4d76-7c9a-4c00-b490-65087048532a"}]} [ 879.130332] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.174s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.132029] env[69796]: ERROR nova.compute.manager [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 879.132029] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Traceback (most recent call last): [ 879.132029] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 879.132029] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] yield [ 879.132029] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 879.132029] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] self.set_inventory_for_provider( [ 879.132029] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 879.132029] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 879.132452] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-187a4d76-7c9a-4c00-b490-65087048532a"}]} [ 879.132452] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] [ 879.132452] env[69796]: ERROR 
nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] During handling of the above exception, another exception occurred: [ 879.132452] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] [ 879.132452] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Traceback (most recent call last): [ 879.132452] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 879.132452] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] with self.rt.instance_claim(context, instance, node, allocs, [ 879.132452] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 879.132452] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] return f(*args, **kwargs) [ 879.132844] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 879.132844] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] self._update(elevated, cn) [ 879.132844] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 879.132844] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] self._update_to_placement(context, compute_node, startup) [ 879.132844] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 879.132844] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 879.132844] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 879.132844] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] return attempt.get(self._wrap_exception) [ 879.132844] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 879.132844] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] six.reraise(self.value[0], self.value[1], self.value[2]) [ 879.132844] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 879.132844] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] raise value [ 879.132844] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 879.133233] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 879.133233] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 879.133233] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] self.reportclient.update_from_provider_tree( [ 879.133233] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 879.133233] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] with catch_all(pd.uuid): [ 879.133233] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 879.133233] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] self.gen.throw(typ, value, traceback) [ 879.133233] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 879.133233] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] raise exception.ResourceProviderSyncFailed() [ 879.133233] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 879.133233] env[69796]: ERROR nova.compute.manager [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] [ 879.135430] env[69796]: DEBUG nova.compute.utils [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 879.135430] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.385s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.135430] env[69796]: INFO nova.compute.claims [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 879.138362] env[69796]: DEBUG nova.compute.manager [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Build of instance f6bd68f4-3eb2-4203-bc00-2a5c7927cfac was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 879.138801] env[69796]: DEBUG nova.compute.manager [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 879.139067] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Acquiring lock "refresh_cache-f6bd68f4-3eb2-4203-bc00-2a5c7927cfac" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.139201] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Acquired lock "refresh_cache-f6bd68f4-3eb2-4203-bc00-2a5c7927cfac" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.139360] env[69796]: DEBUG nova.network.neutron [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.519831] env[69796]: DEBUG oslo_concurrency.lockutils [None req-61dfbf32-30b2-42a9-a6a2-f892cc5c435b tempest-SecurityGroupsTestJSON-646526291 tempest-SecurityGroupsTestJSON-646526291-project-member] Lock "f5fb74e8-1197-4314-8fa4-2d0a3d231ad4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.383s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.666795] env[69796]: DEBUG nova.network.neutron [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 879.744906] env[69796]: DEBUG nova.network.neutron [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.024157] env[69796]: DEBUG nova.compute.manager [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 880.170918] env[69796]: DEBUG nova.scheduler.client.report [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 880.185361] env[69796]: DEBUG nova.scheduler.client.report [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 880.185914] env[69796]: DEBUG nova.compute.provider_tree [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 880.198578] env[69796]: DEBUG nova.scheduler.client.report [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 880.222783] env[69796]: DEBUG nova.scheduler.client.report [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 880.248872] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Releasing lock "refresh_cache-f6bd68f4-3eb2-4203-bc00-2a5c7927cfac" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.249210] env[69796]: DEBUG nova.compute.manager [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 
tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 880.249939] env[69796]: DEBUG nova.compute.manager [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.250349] env[69796]: DEBUG nova.network.neutron [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.277815] env[69796]: DEBUG nova.network.neutron [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.552702] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.725699] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6c4a7b-1620-4a7d-926a-5b91df81299d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.734981] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e44ecaf-d50a-47fb-9645-98b28f0c7808 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.767501] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f13e27-6637-45c6-80ea-377a7a942aae {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.776284] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4aa759d-a1c9-4c74-ab7e-9300b89d9373 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.780880] env[69796]: DEBUG nova.network.neutron [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.793721] env[69796]: DEBUG nova.compute.provider_tree [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Updating inventory in 
ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 881.284127] env[69796]: INFO nova.compute.manager [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] [instance: f6bd68f4-3eb2-4203-bc00-2a5c7927cfac] Took 1.03 seconds to deallocate network for instance. [ 881.323792] env[69796]: ERROR nova.scheduler.client.report [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [req-6fa89240-2f52-46e7-96a9-a62ea862725e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-6fa89240-2f52-46e7-96a9-a62ea862725e"}]} [ 881.324221] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.324900] env[69796]: ERROR nova.compute.manager [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
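The 400 above is placement rejecting the PUT to /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories because the compute node reports DISK_GB with max_unit 0, while placement's inventory schema (quoted verbatim in the error detail) requires max_unit to be an integer between 1 and 2147483647. The traceback that follows is the same failure surfacing out of the resource tracker. Below is a minimal sketch that reproduces the validation error locally; it mirrors the schema fragment from the error detail and assumes only that the jsonschema package is importable — it is not placement's actual code.

    # Illustrative reproduction of the 400 above; this mirrors the schema
    # fragment quoted in the error detail, it is NOT placement's actual code.
    # Assumes the jsonschema package is importable.
    import jsonschema

    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {
                                "type": "integer",
                                "minimum": 1,
                                "maximum": 2147483647,
                            },
                        },
                    },
                },
            },
        },
    }

    # DISK_GB inventory exactly as the resource tracker reported it in the log.
    payload = {
        "inventories": {
            "DISK_GB": {
                "total": 400, "reserved": 0, "min_unit": 1,
                "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0,
            },
        },
    }

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.ValidationError as exc:
        print(exc.message)  # "0 is less than the minimum of 1"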
[ 881.324900] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Traceback (most recent call last): [ 881.324900] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 881.324900] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] yield [ 881.324900] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 881.324900] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] self.set_inventory_for_provider( [ 881.324900] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 881.324900] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 881.325183] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-6fa89240-2f52-46e7-96a9-a62ea862725e"}]} [ 881.325183] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] [ 881.325183] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] During handling of the above exception, another exception occurred: [ 881.325183] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] [ 881.325183] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Traceback (most recent call last): [ 881.325183] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 881.325183] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] with self.rt.instance_claim(context, instance, node, allocs, [ 881.325183] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 881.325183] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] return f(*args, **kwargs) [ 881.325435] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 881.325435] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] self._update(elevated, cn) [ 881.325435] env[69796]: ERROR nova.compute.manager [instance: 
568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 881.325435] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] self._update_to_placement(context, compute_node, startup) [ 881.325435] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 881.325435] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 881.325435] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 881.325435] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] return attempt.get(self._wrap_exception) [ 881.325435] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 881.325435] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] six.reraise(self.value[0], self.value[1], self.value[2]) [ 881.325435] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 881.325435] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] raise value [ 881.325435] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 881.325776] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 881.325776] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 881.325776] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] self.reportclient.update_from_provider_tree( [ 881.325776] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 881.325776] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] with catch_all(pd.uuid): [ 881.325776] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 881.325776] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] self.gen.throw(typ, value, traceback) [ 881.325776] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 881.325776] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] raise exception.ResourceProviderSyncFailed() [ 881.325776] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 881.325776] env[69796]: ERROR nova.compute.manager [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] [ 881.326177] env[69796]: DEBUG nova.compute.utils [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 881.327377] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.598s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.329314] env[69796]: INFO nova.compute.claims [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 881.336019] env[69796]: DEBUG nova.compute.manager [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Build of instance 568ebebb-730e-40c4-a1a3-d03d7d4e5a85 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 881.336019] env[69796]: DEBUG nova.compute.manager [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 881.336019] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Acquiring lock "refresh_cache-568ebebb-730e-40c4-a1a3-d03d7d4e5a85" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.336019] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Acquired lock "refresh_cache-568ebebb-730e-40c4-a1a3-d03d7d4e5a85" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.336270] env[69796]: DEBUG nova.network.neutron [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 881.871960] env[69796]: DEBUG nova.network.neutron [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 
tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 881.986111] env[69796]: DEBUG nova.network.neutron [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.324454] env[69796]: INFO nova.scheduler.client.report [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Deleted allocations for instance f6bd68f4-3eb2-4203-bc00-2a5c7927cfac [ 882.371871] env[69796]: DEBUG nova.scheduler.client.report [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 882.387823] env[69796]: DEBUG nova.scheduler.client.report [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 882.388222] env[69796]: DEBUG nova.compute.provider_tree [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 882.402512] env[69796]: DEBUG nova.scheduler.client.report [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 882.428276] env[69796]: DEBUG nova.scheduler.client.report [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Refreshing trait associations for 
resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 882.492959] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Releasing lock "refresh_cache-568ebebb-730e-40c4-a1a3-d03d7d4e5a85" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.493251] env[69796]: DEBUG nova.compute.manager [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 882.493443] env[69796]: DEBUG nova.compute.manager [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 882.493616] env[69796]: DEBUG nova.network.neutron [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 882.513225] env[69796]: DEBUG nova.network.neutron [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 882.835383] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3ce88fd7-5108-4f96-a74a-3b9d3be877c9 tempest-ServerDiagnosticsNegativeTest-1695339752 tempest-ServerDiagnosticsNegativeTest-1695339752-project-member] Lock "f6bd68f4-3eb2-4203-bc00-2a5c7927cfac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.905s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.873464] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-230e776e-02f4-45a9-95b4-1edb64e36016 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.882957] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db3990f-ee0a-448d-8a8b-4015803196d8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.919176] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7a0dd2-7d0c-47a3-9fb6-5d1757dae154 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.927999] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598f8e9f-826d-4b9d-bc95-7d975a84c103 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.943631] env[69796]: DEBUG nova.compute.provider_tree [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 883.015732] env[69796]: DEBUG nova.network.neutron [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] [instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.285892] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Acquiring lock "428c79e1-3a68-444b-8a4c-5ed2184539b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.286151] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Lock "428c79e1-3a68-444b-8a4c-5ed2184539b6" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.337884] env[69796]: DEBUG nova.compute.manager [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 883.470059] env[69796]: ERROR nova.scheduler.client.report [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [req-849c9c92-d2ac-4e44-96ae-fd24aec1de4b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-849c9c92-d2ac-4e44-96ae-fd24aec1de4b"}]} [ 883.470697] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.143s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.471054] env[69796]: ERROR nova.compute.manager [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 883.471054] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Traceback (most recent call last): [ 883.471054] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 883.471054] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] yield [ 883.471054] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 883.471054] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] self.set_inventory_for_provider( [ 883.471054] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 883.471054] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 883.471325] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-849c9c92-d2ac-4e44-96ae-fd24aec1de4b"}]} [ 883.471325] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] [ 883.471325] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] During handling of the above exception, another exception occurred: [ 883.471325] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] [ 883.471325] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Traceback (most recent call last): [ 883.471325] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 883.471325] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] with self.rt.instance_claim(context, instance, node, allocs, [ 883.471325] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 883.471325] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] return f(*args, **kwargs) [ 883.471604] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 883.471604] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] self._update(elevated, cn) [ 883.471604] env[69796]: ERROR nova.compute.manager [instance: 
3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 883.471604] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] self._update_to_placement(context, compute_node, startup) [ 883.471604] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 883.471604] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 883.471604] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 883.471604] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] return attempt.get(self._wrap_exception) [ 883.471604] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 883.471604] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] six.reraise(self.value[0], self.value[1], self.value[2]) [ 883.471604] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 883.471604] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] raise value [ 883.471604] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 883.471963] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 883.471963] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 883.471963] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] self.reportclient.update_from_provider_tree( [ 883.471963] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 883.471963] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] with catch_all(pd.uuid): [ 883.471963] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 883.471963] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] self.gen.throw(typ, value, traceback) [ 883.471963] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 883.471963] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] raise exception.ResourceProviderSyncFailed() [ 883.471963] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 883.471963] env[69796]: ERROR nova.compute.manager [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] [ 883.472272] env[69796]: DEBUG nova.compute.utils [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 883.474156] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.549s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.475558] env[69796]: INFO nova.compute.claims [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.478446] env[69796]: DEBUG nova.compute.manager [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Build of instance 3cebd244-f9e7-4360-8249-4e1720c4d557 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 883.478875] env[69796]: DEBUG nova.compute.manager [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 883.479115] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Acquiring lock "refresh_cache-3cebd244-f9e7-4360-8249-4e1720c4d557" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.479258] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Acquired lock "refresh_cache-3cebd244-f9e7-4360-8249-4e1720c4d557" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.479425] env[69796]: DEBUG nova.network.neutron [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.518688] env[69796]: INFO nova.compute.manager [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] 
[instance: 568ebebb-730e-40c4-a1a3-d03d7d4e5a85] Took 1.02 seconds to deallocate network for instance. [ 883.869875] env[69796]: DEBUG oslo_concurrency.lockutils [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.013412] env[69796]: DEBUG nova.network.neutron [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 884.129208] env[69796]: DEBUG nova.network.neutron [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.524237] env[69796]: DEBUG nova.scheduler.client.report [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 884.544771] env[69796]: DEBUG nova.scheduler.client.report [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 884.545035] env[69796]: DEBUG nova.compute.provider_tree [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 884.554035] env[69796]: INFO nova.scheduler.client.report [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Deleted allocations for instance 568ebebb-730e-40c4-a1a3-d03d7d4e5a85 [ 884.561540] env[69796]: DEBUG nova.scheduler.client.report 
[None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 884.591924] env[69796]: DEBUG nova.scheduler.client.report [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 884.631807] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Releasing lock "refresh_cache-3cebd244-f9e7-4360-8249-4e1720c4d557" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.632148] env[69796]: DEBUG nova.compute.manager [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 884.634243] env[69796]: DEBUG nova.compute.manager [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 884.634243] env[69796]: DEBUG nova.network.neutron [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 884.650883] env[69796]: DEBUG nova.network.neutron [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 885.022987] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ac5a13-5ff6-4648-8278-82099f24b8ed {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.032382] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726aa3cb-c34d-4b96-9eda-d7baf3228efd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.063520] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cf1eb9-5a40-4cc0-999e-92c459bae87e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.066916] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df296f36-f2df-4d34-b8ab-cc309db644f4 tempest-ImagesOneServerNegativeTestJSON-940035275 tempest-ImagesOneServerNegativeTestJSON-940035275-project-member] Lock "568ebebb-730e-40c4-a1a3-d03d7d4e5a85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.173s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.072050] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8366f2c0-93d5-42be-9bae-a9130c60b307 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.087442] env[69796]: DEBUG nova.compute.provider_tree [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 885.152788] env[69796]: DEBUG nova.network.neutron [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 3cebd244-f9e7-4360-8249-4e1720c4d557] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.573134] env[69796]: DEBUG nova.compute.manager [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 885.632107] env[69796]: ERROR nova.scheduler.client.report [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [req-523ccdd4-153b-486f-be3f-6d4ac012e186] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-523ccdd4-153b-486f-be3f-6d4ac012e186"}]} [ 885.632107] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.157s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.632467] env[69796]: ERROR nova.compute.manager [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
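By this point the loop is clearly periodic: each claim serializes on the "compute_resources" lock (waits of 15-31 seconds above), holds it for roughly 2.1-2.2 seconds, fails the inventory PUT, and the instance is re-scheduled. The periodic _refresh_and_get_inventory calls keep pulling DISK_GB max_unit = 1 back from placement, while the local provider tree keeps pushing max_unit = 0, so only that single field ever differs between the two payloads. The short, purely illustrative sketch below diffs the two inventory dicts exactly as they appear in the log.

    # Illustrative diff of the two inventory payloads that alternate in the
    # log: what placement currently holds (pulled by _refresh_and_get_inventory)
    # versus what the resource tracker keeps trying to PUT.
    placement_view = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 1,
                    "step_size": 1, "allocation_ratio": 1.0},
    }
    compute_view = {
        **placement_view,
        "DISK_GB": {**placement_view["DISK_GB"], "max_unit": 0},
    }

    for rc, fields in placement_view.items():
        for field, value in fields.items():
            if compute_view[rc][field] != value:
                print(rc, field, value, "->", compute_view[rc][field])
    # Prints: DISK_GB max_unit 1 -> 0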
[ 885.632467] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Traceback (most recent call last): [ 885.632467] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 885.632467] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] yield [ 885.632467] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 885.632467] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] self.set_inventory_for_provider( [ 885.632467] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 885.632467] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 885.632715] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-523ccdd4-153b-486f-be3f-6d4ac012e186"}]} [ 885.632715] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] [ 885.632715] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] During handling of the above exception, another exception occurred: [ 885.632715] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] [ 885.632715] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Traceback (most recent call last): [ 885.632715] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 885.632715] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] with self.rt.instance_claim(context, instance, node, allocs, [ 885.632715] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 885.632715] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] return f(*args, **kwargs) [ 885.632987] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 885.632987] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] self._update(elevated, cn) [ 885.632987] env[69796]: ERROR nova.compute.manager [instance: 
0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 885.632987] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] self._update_to_placement(context, compute_node, startup) [ 885.632987] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 885.632987] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 885.632987] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 885.632987] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] return attempt.get(self._wrap_exception) [ 885.632987] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 885.632987] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] six.reraise(self.value[0], self.value[1], self.value[2]) [ 885.632987] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 885.632987] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] raise value [ 885.632987] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 885.634186] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 885.634186] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 885.634186] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] self.reportclient.update_from_provider_tree( [ 885.634186] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 885.634186] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] with catch_all(pd.uuid): [ 885.634186] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 885.634186] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] self.gen.throw(typ, value, traceback) [ 885.634186] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 885.634186] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] raise exception.ResourceProviderSyncFailed() [ 885.634186] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 885.634186] env[69796]: ERROR nova.compute.manager [instance: 0bbd7678-014c-4f77-8608-277bce12410d] [ 885.634554] env[69796]: DEBUG nova.compute.utils [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 885.635948] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.631s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.637911] env[69796]: INFO nova.compute.claims [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 885.643282] env[69796]: DEBUG nova.compute.manager [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Build of instance 0bbd7678-014c-4f77-8608-277bce12410d was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 885.643282] env[69796]: DEBUG nova.compute.manager [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 885.643282] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Acquiring lock "refresh_cache-0bbd7678-014c-4f77-8608-277bce12410d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.643282] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Acquired lock "refresh_cache-0bbd7678-014c-4f77-8608-277bce12410d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.643505] env[69796]: DEBUG nova.network.neutron [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 885.656434] env[69796]: INFO nova.compute.manager [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 
3cebd244-f9e7-4360-8249-4e1720c4d557] Took 1.02 seconds to deallocate network for instance. [ 886.105624] env[69796]: DEBUG oslo_concurrency.lockutils [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.122437] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "c90ad510-78a4-4ee5-bcc5-cc564d26735a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.122437] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "c90ad510-78a4-4ee5-bcc5-cc564d26735a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.190469] env[69796]: DEBUG nova.network.neutron [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 886.354970] env[69796]: DEBUG nova.network.neutron [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.676037] env[69796]: DEBUG nova.scheduler.client.report [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 886.691938] env[69796]: DEBUG nova.scheduler.client.report [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 886.692209] env[69796]: DEBUG nova.compute.provider_tree [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 
tempest-ServerRescueTestJSON-611807943-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 886.699399] env[69796]: INFO nova.scheduler.client.report [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Deleted allocations for instance 3cebd244-f9e7-4360-8249-4e1720c4d557 [ 886.706849] env[69796]: DEBUG nova.scheduler.client.report [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 886.733291] env[69796]: DEBUG nova.scheduler.client.report [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 886.857414] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Releasing lock "refresh_cache-0bbd7678-014c-4f77-8608-277bce12410d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.858491] env[69796]: DEBUG nova.compute.manager [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 886.858491] env[69796]: DEBUG nova.compute.manager [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 886.858491] env[69796]: DEBUG nova.network.neutron [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 886.992225] env[69796]: DEBUG nova.network.neutron [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.199898] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34647e44-43cc-445c-a20f-3af5ed0c69e8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.208106] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8200e50-0839-438b-b358-ed402b20138f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.244156] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4c88fc2e-0337-4f1c-8230-a93cf17bb45c tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Lock "3cebd244-f9e7-4360-8249-4e1720c4d557" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.583s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.244954] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92833c9f-3181-4d8e-a155-c3e7155c0813 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.254027] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0905839d-82b6-4925-9bc2-8a12a327eb92 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.271152] env[69796]: DEBUG nova.compute.provider_tree [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 887.497233] env[69796]: DEBUG 
nova.network.neutron [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.751747] env[69796]: DEBUG nova.compute.manager [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 887.794100] env[69796]: ERROR nova.scheduler.client.report [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [req-766170cf-d84f-44eb-931d-5909ba2d7676] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-766170cf-d84f-44eb-931d-5909ba2d7676"}]} [ 887.794678] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.159s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.795421] env[69796]: ERROR nova.compute.manager [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 887.795421] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Traceback (most recent call last): [ 887.795421] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 887.795421] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] yield [ 887.795421] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 887.795421] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] self.set_inventory_for_provider( [ 887.795421] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 887.795421] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 887.795785] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-766170cf-d84f-44eb-931d-5909ba2d7676"}]} [ 887.795785] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] [ 887.795785] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] During handling of the above exception, another exception occurred: [ 887.795785] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] [ 887.795785] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Traceback (most recent call last): [ 887.795785] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 887.795785] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] with self.rt.instance_claim(context, instance, node, allocs, [ 887.795785] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 887.795785] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] return f(*args, **kwargs) [ 887.796106] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 887.796106] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] self._update(elevated, cn) [ 887.796106] env[69796]: ERROR nova.compute.manager [instance: 
82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 887.796106] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] self._update_to_placement(context, compute_node, startup) [ 887.796106] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 887.796106] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 887.796106] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 887.796106] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] return attempt.get(self._wrap_exception) [ 887.796106] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 887.796106] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] six.reraise(self.value[0], self.value[1], self.value[2]) [ 887.796106] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 887.796106] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] raise value [ 887.796106] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 887.796504] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 887.796504] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 887.796504] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] self.reportclient.update_from_provider_tree( [ 887.796504] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 887.796504] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] with catch_all(pd.uuid): [ 887.796504] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 887.796504] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] self.gen.throw(typ, value, traceback) [ 887.796504] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 887.796504] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] raise exception.ResourceProviderSyncFailed() [ 887.796504] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 887.796504] env[69796]: ERROR nova.compute.manager [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] [ 887.798901] env[69796]: DEBUG nova.compute.utils [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 887.803025] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.670s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.803025] env[69796]: INFO nova.compute.claims [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 887.804930] env[69796]: DEBUG nova.compute.manager [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Build of instance 82c2040c-9ada-4d77-88b1-453545c66b61 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 887.805548] env[69796]: DEBUG nova.compute.manager [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 887.805923] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Acquiring lock "refresh_cache-82c2040c-9ada-4d77-88b1-453545c66b61" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.806207] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Acquired lock "refresh_cache-82c2040c-9ada-4d77-88b1-453545c66b61" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 887.806467] env[69796]: DEBUG nova.network.neutron [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 888.000033] env[69796]: INFO nova.compute.manager [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] [instance: 0bbd7678-014c-4f77-8608-277bce12410d] Took 1.14 
seconds to deallocate network for instance. [ 888.277035] env[69796]: DEBUG oslo_concurrency.lockutils [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.337581] env[69796]: DEBUG nova.network.neutron [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 888.456459] env[69796]: DEBUG nova.network.neutron [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.831422] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Acquiring lock "a2632638-403e-43e1-add1-949ef2b3d125" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.832407] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Lock "a2632638-403e-43e1-add1-949ef2b3d125" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.842135] env[69796]: DEBUG nova.scheduler.client.report [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 888.857314] env[69796]: DEBUG nova.scheduler.client.report [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 888.857314] env[69796]: DEBUG nova.compute.provider_tree [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Updating inventory 
in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 888.874824] env[69796]: DEBUG nova.scheduler.client.report [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 888.903020] env[69796]: DEBUG nova.scheduler.client.report [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 888.964423] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Releasing lock "refresh_cache-82c2040c-9ada-4d77-88b1-453545c66b61" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.964423] env[69796]: DEBUG nova.compute.manager [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 888.964423] env[69796]: DEBUG nova.compute.manager [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 888.964423] env[69796]: DEBUG nova.network.neutron [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 888.981062] env[69796]: DEBUG nova.network.neutron [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 889.041225] env[69796]: INFO nova.scheduler.client.report [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Deleted allocations for instance 0bbd7678-014c-4f77-8608-277bce12410d [ 889.380445] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b6fc44-4ee8-4642-8ce9-eb3485039c82 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.388813] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a591e229-c818-41d9-86d7-ceee8453e9b0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.420931] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3850fb4a-6222-452b-b1fa-c16962835b76 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.429278] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88372202-20e6-4d4e-8a4c-6faaa6700001 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.443273] env[69796]: DEBUG nova.compute.provider_tree [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 889.492384] env[69796]: DEBUG nova.network.neutron [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.555113] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aae675b9-785b-4468-a346-265fd7563966 tempest-ServersTestBootFromVolume-1386714627 tempest-ServersTestBootFromVolume-1386714627-project-member] Lock "0bbd7678-014c-4f77-8608-277bce12410d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.390s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.968711] env[69796]: ERROR nova.scheduler.client.report [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [req-aaf34c33-d9d5-4b65-a07e-111798c8c562] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-aaf34c33-d9d5-4b65-a07e-111798c8c562"}]} [ 889.969146] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.169s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.969772] env[69796]: ERROR nova.compute.manager [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 889.969772] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Traceback (most recent call last): [ 889.969772] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 889.969772] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] yield [ 889.969772] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 889.969772] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] self.set_inventory_for_provider( [ 889.969772] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 889.969772] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 889.970019] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": 
"placement.undefined_code", "request_id": "req-aaf34c33-d9d5-4b65-a07e-111798c8c562"}]} [ 889.970019] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] [ 889.970019] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] During handling of the above exception, another exception occurred: [ 889.970019] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] [ 889.970019] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Traceback (most recent call last): [ 889.970019] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 889.970019] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] with self.rt.instance_claim(context, instance, node, allocs, [ 889.970019] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 889.970019] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] return f(*args, **kwargs) [ 889.970289] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 889.970289] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] self._update(elevated, cn) [ 889.970289] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 889.970289] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] self._update_to_placement(context, compute_node, startup) [ 889.970289] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 889.970289] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 889.970289] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 889.970289] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] return attempt.get(self._wrap_exception) [ 889.970289] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 889.970289] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] six.reraise(self.value[0], self.value[1], self.value[2]) [ 889.970289] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 889.970289] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] raise value [ 889.970289] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 889.970658] env[69796]: ERROR nova.compute.manager [instance: 
d9b2601d-1ebb-4609-90f3-180adb00c6bf] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 889.970658] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 889.970658] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] self.reportclient.update_from_provider_tree( [ 889.970658] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 889.970658] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] with catch_all(pd.uuid): [ 889.970658] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 889.970658] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] self.gen.throw(typ, value, traceback) [ 889.970658] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 889.970658] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] raise exception.ResourceProviderSyncFailed() [ 889.970658] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 889.970658] env[69796]: ERROR nova.compute.manager [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] [ 889.970963] env[69796]: DEBUG nova.compute.utils [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 889.971903] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.799s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.973497] env[69796]: INFO nova.compute.claims [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 889.976463] env[69796]: DEBUG nova.compute.manager [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Build of instance d9b2601d-1ebb-4609-90f3-180adb00c6bf was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 889.978451] env[69796]: DEBUG nova.compute.manager [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 889.978451] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Acquiring lock "refresh_cache-d9b2601d-1ebb-4609-90f3-180adb00c6bf" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.978451] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Acquired lock "refresh_cache-d9b2601d-1ebb-4609-90f3-180adb00c6bf" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.978451] env[69796]: DEBUG nova.network.neutron [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 889.995447] env[69796]: INFO nova.compute.manager [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] [instance: 82c2040c-9ada-4d77-88b1-453545c66b61] Took 1.03 seconds to deallocate network for instance. [ 890.057768] env[69796]: DEBUG nova.compute.manager [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 890.504018] env[69796]: DEBUG nova.network.neutron [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.576437] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.641315] env[69796]: DEBUG nova.network.neutron [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.858361] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Acquiring lock "88536d9e-42b0-4115-b55c-7cf1bc03314b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.858591] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Lock "88536d9e-42b0-4115-b55c-7cf1bc03314b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.009747] env[69796]: DEBUG nova.scheduler.client.report [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 891.026324] env[69796]: DEBUG nova.scheduler.client.report [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 891.026615] env[69796]: DEBUG nova.compute.provider_tree [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 891.041772] env[69796]: INFO nova.scheduler.client.report [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Deleted allocations for instance 82c2040c-9ada-4d77-88b1-453545c66b61 [ 891.048686] env[69796]: DEBUG nova.scheduler.client.report [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 891.079162] env[69796]: DEBUG nova.scheduler.client.report [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 891.144258] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Releasing lock "refresh_cache-d9b2601d-1ebb-4609-90f3-180adb00c6bf" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.145291] env[69796]: DEBUG nova.compute.manager [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 891.145291] env[69796]: DEBUG nova.compute.manager [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 891.145291] env[69796]: DEBUG nova.network.neutron [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 891.162755] env[69796]: DEBUG nova.network.neutron [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.527561] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393fd3e7-3d54-4451-b10b-f9fe56ac3eb1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.535693] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a53ae4-8875-4366-95a0-9fbdbadaa5ef {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.568047] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05ef7bc-c300-46ed-b144-ef315f6e3d6f tempest-ServerRescueTestJSON-611807943 tempest-ServerRescueTestJSON-611807943-project-member] Lock "82c2040c-9ada-4d77-88b1-453545c66b61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.461s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.568823] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb18df9-543c-4f49-8955-be8f8e74e6bd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.576967] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487551f5-8bc1-4cca-aa7c-eaedce837b22 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.592982] env[69796]: DEBUG nova.compute.provider_tree [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 891.668569] env[69796]: DEBUG nova.network.neutron [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: d9b2601d-1ebb-4609-90f3-180adb00c6bf] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.072836] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 892.120411] env[69796]: ERROR nova.scheduler.client.report [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [req-67c00137-15b0-44bf-93e6-fecfe24486a4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-67c00137-15b0-44bf-93e6-fecfe24486a4"}]} [ 892.120783] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.149s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.121375] env[69796]: ERROR nova.compute.manager [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 892.121375] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Traceback (most recent call last): [ 892.121375] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 892.121375] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] yield [ 892.121375] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 892.121375] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] self.set_inventory_for_provider( [ 892.121375] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 892.121375] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 892.121702] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-67c00137-15b0-44bf-93e6-fecfe24486a4"}]} [ 892.121702] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] [ 892.121702] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] During handling of the above exception, another exception occurred: [ 892.121702] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] [ 892.121702] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Traceback (most recent call last): [ 892.121702] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 892.121702] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] with self.rt.instance_claim(context, instance, node, allocs, [ 892.121702] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 892.121702] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] return f(*args, **kwargs) [ 892.122052] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 892.122052] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] self._update(elevated, cn) [ 892.122052] env[69796]: ERROR nova.compute.manager [instance: 
bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 892.122052] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] self._update_to_placement(context, compute_node, startup) [ 892.122052] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 892.122052] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 892.122052] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 892.122052] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] return attempt.get(self._wrap_exception) [ 892.122052] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 892.122052] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] six.reraise(self.value[0], self.value[1], self.value[2]) [ 892.122052] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 892.122052] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] raise value [ 892.122052] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 892.122743] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 892.122743] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 892.122743] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] self.reportclient.update_from_provider_tree( [ 892.122743] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 892.122743] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] with catch_all(pd.uuid): [ 892.122743] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 892.122743] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] self.gen.throw(typ, value, traceback) [ 892.122743] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 892.122743] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] raise exception.ResourceProviderSyncFailed() [ 892.122743] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 892.122743] env[69796]: ERROR nova.compute.manager [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] [ 892.123280] env[69796]: DEBUG nova.compute.utils [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 892.123681] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.893s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.125155] env[69796]: INFO nova.compute.claims [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 892.130174] env[69796]: DEBUG nova.compute.manager [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Build of instance bec0df68-c72b-4ecd-9a03-c8bf02f8059e was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 892.130174] env[69796]: DEBUG nova.compute.manager [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 892.130174] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Acquiring lock "refresh_cache-bec0df68-c72b-4ecd-9a03-c8bf02f8059e" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.130174] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Acquired lock "refresh_cache-bec0df68-c72b-4ecd-9a03-c8bf02f8059e" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.131321] env[69796]: DEBUG nova.network.neutron [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 892.170696] env[69796]: INFO nova.compute.manager [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] [instance: 
d9b2601d-1ebb-4609-90f3-180adb00c6bf] Took 1.03 seconds to deallocate network for instance. [ 892.600686] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.662541] env[69796]: DEBUG nova.network.neutron [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 892.778520] env[69796]: DEBUG nova.network.neutron [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.162922] env[69796]: DEBUG nova.scheduler.client.report [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 893.186124] env[69796]: DEBUG nova.scheduler.client.report [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 893.186798] env[69796]: DEBUG nova.compute.provider_tree [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 893.199526] env[69796]: DEBUG nova.scheduler.client.report [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:831}} [ 893.210147] env[69796]: INFO nova.scheduler.client.report [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Deleted allocations for instance d9b2601d-1ebb-4609-90f3-180adb00c6bf [ 893.222621] env[69796]: DEBUG nova.scheduler.client.report [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 893.281667] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Releasing lock "refresh_cache-bec0df68-c72b-4ecd-9a03-c8bf02f8059e" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.282885] env[69796]: DEBUG nova.compute.manager [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 893.284038] env[69796]: DEBUG nova.compute.manager [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 893.284038] env[69796]: DEBUG nova.network.neutron [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 893.312891] env[69796]: DEBUG nova.network.neutron [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 893.719627] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c647e871-221a-44b4-b396-5e4fea49af60 tempest-ServerActionsV293TestJSON-1380610808 tempest-ServerActionsV293TestJSON-1380610808-project-member] Lock "d9b2601d-1ebb-4609-90f3-180adb00c6bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.540s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.733255] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe15bc53-647a-4cdc-9f91-345985c3c93e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.744640] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2eb5d3b-daf1-48ad-9bdd-c742389461d0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.782236] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396d284a-b639-479f-a93c-a4b631032d8c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.792022] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce6f4ec-c1f9-453c-9658-714c51d987f9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.806647] env[69796]: DEBUG nova.compute.provider_tree [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 893.815428] env[69796]: DEBUG nova.network.neutron [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.224396] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 894.324697] env[69796]: INFO nova.compute.manager [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] [instance: bec0df68-c72b-4ecd-9a03-c8bf02f8059e] Took 1.04 seconds to deallocate network for instance. 
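The repeated 400 responses in this stretch of the log all have the same cause: the compute node's provider tree reports a DISK_GB inventory with max_unit of 0, while the Placement inventory schema requires max_unit to be an integer of at least 1. Each attempt by ResourceTracker.instance_claim to sync the provider therefore fails, the claim raises ResourceProviderSyncFailed, and the affected build is re-scheduled. The snippet below is an illustrative sketch, not Nova or Placement source: it reproduces the validation failure locally with the jsonschema library, using the schema fragment and the DISK_GB payload values quoted verbatim in the error records above.

```python
# Sketch only: reproduce the Placement-side rejection seen in the log.
# The schema fragment mirrors the path quoted in the error detail
# (schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']
#  ['properties']['max_unit'] = {'type': 'integer', 'minimum': 1, ...});
# it is not the full Placement schema.
import jsonschema

INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "total": {"type": "integer"},
                        "reserved": {"type": "integer"},
                        "min_unit": {"type": "integer", "minimum": 1},
                        "max_unit": {
                            "type": "integer",
                            "minimum": 1,
                            "maximum": 2147483647,
                        },
                        "step_size": {"type": "integer", "minimum": 1},
                        "allocation_ratio": {"type": "number"},
                    },
                },
            },
        },
    },
}

# The DISK_GB entry the compute host keeps trying to sync; max_unit == 0
# is the invalid value (copied from the "Failed to update inventory" records).
payload = {
    "inventories": {
        "DISK_GB": {
            "total": 400,
            "reserved": 0,
            "min_unit": 1,
            "max_unit": 0,
            "step_size": 1,
            "allocation_ratio": 1.0,
        },
    },
}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints "0 is less than the minimum of 1", matching the 400 detail above.
    print(exc.message)
    # Path of the offending value: ['inventories', 'DISK_GB', 'max_unit']
    print(list(exc.absolute_path))
```

Note that the inventory Placement reports back during the refreshes in this section has DISK_GB max_unit of 1 and passes validation, so only the value the compute host is pushing is invalid; the same rejection then recurs for instances bec0df68-c72b-4ecd-9a03-c8bf02f8059e, 4de7ecb8-c591-430d-8e87-70749358f05d and 6237eecf-7560-45c1-9fcd-6bd2a0747e7f as each claim retries the same update.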
[ 894.337023] env[69796]: ERROR nova.scheduler.client.report [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [req-94ffdc37-dd9b-4e1f-b9fa-d55cb67e6f64] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-94ffdc37-dd9b-4e1f-b9fa-d55cb67e6f64"}]} [ 894.337023] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.337235] env[69796]: ERROR nova.compute.manager [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 894.337235] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Traceback (most recent call last): [ 894.337235] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 894.337235] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] yield [ 894.337235] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 894.337235] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] self.set_inventory_for_provider( [ 894.337235] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 894.337235] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 894.337475] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-94ffdc37-dd9b-4e1f-b9fa-d55cb67e6f64"}]} [ 894.337475] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] [ 894.337475] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] During handling of the above exception, another exception occurred: [ 894.337475] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] [ 894.337475] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Traceback (most recent call last): [ 894.337475] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 894.337475] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] with self.rt.instance_claim(context, instance, node, allocs, [ 894.337475] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 894.337475] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] return f(*args, **kwargs) [ 894.337769] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 894.337769] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] self._update(elevated, cn) [ 894.337769] env[69796]: ERROR nova.compute.manager [instance: 
4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 894.337769] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] self._update_to_placement(context, compute_node, startup) [ 894.337769] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 894.337769] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 894.337769] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 894.337769] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] return attempt.get(self._wrap_exception) [ 894.337769] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 894.337769] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] six.reraise(self.value[0], self.value[1], self.value[2]) [ 894.337769] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 894.337769] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] raise value [ 894.337769] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 894.338171] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 894.338171] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 894.338171] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] self.reportclient.update_from_provider_tree( [ 894.338171] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 894.338171] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] with catch_all(pd.uuid): [ 894.338171] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 894.338171] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] self.gen.throw(typ, value, traceback) [ 894.338171] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 894.338171] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] raise exception.ResourceProviderSyncFailed() [ 894.338171] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 894.338171] env[69796]: ERROR nova.compute.manager [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] [ 894.338494] env[69796]: DEBUG nova.compute.utils [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 894.338494] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.819s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.338494] env[69796]: INFO nova.compute.claims [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.339527] env[69796]: DEBUG nova.compute.manager [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Build of instance 4de7ecb8-c591-430d-8e87-70749358f05d was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 894.339928] env[69796]: DEBUG nova.compute.manager [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 894.340234] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "refresh_cache-4de7ecb8-c591-430d-8e87-70749358f05d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.340388] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquired lock "refresh_cache-4de7ecb8-c591-430d-8e87-70749358f05d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.340552] env[69796]: DEBUG nova.network.neutron [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 894.759218] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Acquiring 
lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.880414] env[69796]: DEBUG nova.network.neutron [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 895.054800] env[69796]: DEBUG nova.network.neutron [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.373121] env[69796]: INFO nova.scheduler.client.report [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Deleted allocations for instance bec0df68-c72b-4ecd-9a03-c8bf02f8059e [ 895.393894] env[69796]: DEBUG nova.scheduler.client.report [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 895.411163] env[69796]: DEBUG nova.scheduler.client.report [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 895.412440] env[69796]: DEBUG nova.compute.provider_tree [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 895.429844] env[69796]: DEBUG nova.scheduler.client.report [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:831}} [ 895.455554] env[69796]: DEBUG nova.scheduler.client.report [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 895.558735] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Releasing lock "refresh_cache-4de7ecb8-c591-430d-8e87-70749358f05d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.558989] env[69796]: DEBUG nova.compute.manager [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 895.559205] env[69796]: DEBUG nova.compute.manager [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 895.559374] env[69796]: DEBUG nova.network.neutron [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 895.580035] env[69796]: DEBUG nova.network.neutron [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 895.882409] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c1d864b-96d0-4d61-acab-e44dccd5b9e6 tempest-ServerActionsTestOtherA-1361495306 tempest-ServerActionsTestOtherA-1361495306-project-member] Lock "bec0df68-c72b-4ecd-9a03-c8bf02f8059e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.737s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.922630] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e66aea8-a69b-42f8-a8f0-b784a3a937b1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.934091] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbf64b3-37fa-4611-a49b-f477ff50ac3a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.973906] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b62762-b572-4634-955a-65378d9b5c26 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.983831] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e23dfc-33d8-4ed2-9a42-6d99f32ffded {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.002555] env[69796]: DEBUG nova.compute.provider_tree [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 896.085605] env[69796]: DEBUG nova.network.neutron [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 4de7ecb8-c591-430d-8e87-70749358f05d] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.389330] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 896.534317] env[69796]: ERROR nova.scheduler.client.report [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [req-4d8d0fe8-0860-46b6-a78c-c2323a98a89a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4d8d0fe8-0860-46b6-a78c-c2323a98a89a"}]} [ 896.534317] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.200s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.534807] env[69796]: ERROR nova.compute.manager [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 896.534807] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Traceback (most recent call last): [ 896.534807] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 896.534807] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] yield [ 896.534807] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 896.534807] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] self.set_inventory_for_provider( [ 896.534807] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 896.534807] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 896.535219] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4d8d0fe8-0860-46b6-a78c-c2323a98a89a"}]} [ 896.535219] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] [ 896.535219] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] During handling of the above exception, another exception occurred: [ 896.535219] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] [ 896.535219] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Traceback (most recent call last): [ 896.535219] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 896.535219] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] with self.rt.instance_claim(context, instance, node, allocs, [ 896.535219] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 896.535219] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] return f(*args, **kwargs) [ 896.535681] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 896.535681] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] self._update(elevated, cn) [ 896.535681] env[69796]: ERROR nova.compute.manager [instance: 
6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 896.535681] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] self._update_to_placement(context, compute_node, startup) [ 896.535681] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 896.535681] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 896.535681] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 896.535681] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] return attempt.get(self._wrap_exception) [ 896.535681] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 896.535681] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 896.535681] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 896.535681] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] raise value [ 896.535681] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 896.536324] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 896.536324] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 896.536324] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] self.reportclient.update_from_provider_tree( [ 896.536324] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 896.536324] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] with catch_all(pd.uuid): [ 896.536324] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 896.536324] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] self.gen.throw(typ, value, traceback) [ 896.536324] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 896.536324] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] raise exception.ResourceProviderSyncFailed() [ 896.536324] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 896.536324] env[69796]: ERROR nova.compute.manager [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] [ 896.540891] env[69796]: DEBUG nova.compute.utils [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 896.540891] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.984s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.540891] env[69796]: INFO nova.compute.claims [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.544465] env[69796]: DEBUG nova.compute.manager [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Build of instance 6237eecf-7560-45c1-9fcd-6bd2a0747e7f was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 896.544929] env[69796]: DEBUG nova.compute.manager [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 896.545170] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "refresh_cache-6237eecf-7560-45c1-9fcd-6bd2a0747e7f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.545314] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquired lock "refresh_cache-6237eecf-7560-45c1-9fcd-6bd2a0747e7f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.545474] env[69796]: DEBUG nova.network.neutron [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 896.592045] env[69796]: INFO nova.compute.manager [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 
4de7ecb8-c591-430d-8e87-70749358f05d] Took 1.03 seconds to deallocate network for instance. [ 896.916936] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.076274] env[69796]: DEBUG nova.network.neutron [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 897.207837] env[69796]: DEBUG nova.network.neutron [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.584618] env[69796]: DEBUG nova.scheduler.client.report [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 897.599236] env[69796]: DEBUG nova.scheduler.client.report [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 897.599236] env[69796]: DEBUG nova.compute.provider_tree [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Updating resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 generation from 78 to 79 during operation: update_inventory {{(pid=69796) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 897.599236] env[69796]: DEBUG nova.compute.provider_tree [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 897.615987] env[69796]: DEBUG nova.scheduler.client.report [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 897.626232] env[69796]: INFO nova.scheduler.client.report [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Deleted allocations for instance 4de7ecb8-c591-430d-8e87-70749358f05d [ 897.636516] env[69796]: DEBUG nova.scheduler.client.report [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 897.712497] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Releasing lock "refresh_cache-6237eecf-7560-45c1-9fcd-6bd2a0747e7f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.713332] env[69796]: DEBUG nova.compute.manager [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 897.713332] env[69796]: DEBUG nova.compute.manager [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 897.713735] env[69796]: DEBUG nova.network.neutron [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 897.759175] env[69796]: DEBUG nova.network.neutron [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.070338] env[69796]: DEBUG oslo_concurrency.lockutils [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "f7d7c4dd-6d36-4334-8039-0e348420a65d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.070468] env[69796]: DEBUG oslo_concurrency.lockutils [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "f7d7c4dd-6d36-4334-8039-0e348420a65d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.132574] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6b1959-8859-4709-b05d-fb43d949541b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.140554] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3349cedb-a670-49ae-942a-dbaea101dae4 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "4de7ecb8-c591-430d-8e87-70749358f05d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.017s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.145906] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68665c38-c108-4cd5-9ffe-00afc26784c0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.187441] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4affde1-8c39-4041-a3e5-f98f02f897f7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.196315] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f07266-3bd1-4e29-8cc4-085cd8bd7295 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.213094] env[69796]: DEBUG nova.compute.provider_tree [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 898.266266] env[69796]: DEBUG nova.network.neutron [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] 
[instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.645525] env[69796]: DEBUG nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 898.748162] env[69796]: ERROR nova.scheduler.client.report [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [req-cc85c635-2978-4069-98d8-f22707bd8b0e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-cc85c635-2978-4069-98d8-f22707bd8b0e"}]} [ 898.748162] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.210s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.748363] env[69796]: ERROR nova.compute.manager [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
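(Editorial note on the 400 above: placement rejects the inventory PUT because the DISK_GB max_unit of 0 violates the schema minimum of 1. The snippet below is an illustrative reproduction of that validation failure using the jsonschema library; the schema fragment and payload are reconstructed from the error text in the log and are not placement's actual schema module.)

# Illustrative only: reproduces the placement-side validation failure seen above
# with the jsonschema library. The schema fragment mirrors the error text in the
# log ("minimum: 1" on max_unit); it is an assumption, not placement's real code.
import jsonschema

INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "minimum": 1,
                            "maximum": 2147483647,
                        },
                    },
                },
            },
        },
    },
}

# Payload the compute host tried to PUT, per the log: DISK_GB max_unit is 0.
payload = {
    "inventories": {
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                    "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0},
    },
}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints "0 is less than the minimum of 1", matching the 400 body above.
    print(exc.message)

(Note how the refreshed inventory pulled back from placement reports DISK_GB max_unit as 1, while the compute's own provider tree pushes max_unit 0, which is why every instance_claim on this node hits the same 400.)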
[ 898.748363] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Traceback (most recent call last): [ 898.748363] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 898.748363] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] yield [ 898.748363] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 898.748363] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] self.set_inventory_for_provider( [ 898.748363] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 898.748363] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 898.748644] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-cc85c635-2978-4069-98d8-f22707bd8b0e"}]} [ 898.748644] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] [ 898.748644] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] During handling of the above exception, another exception occurred: [ 898.748644] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] [ 898.748644] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Traceback (most recent call last): [ 898.748644] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 898.748644] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] with self.rt.instance_claim(context, instance, node, allocs, [ 898.748644] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 898.748644] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] return f(*args, **kwargs) [ 898.748940] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 898.748940] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] self._update(elevated, cn) [ 898.748940] env[69796]: ERROR nova.compute.manager [instance: 
09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 898.748940] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] self._update_to_placement(context, compute_node, startup) [ 898.748940] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 898.748940] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 898.748940] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 898.748940] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] return attempt.get(self._wrap_exception) [ 898.748940] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 898.748940] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] six.reraise(self.value[0], self.value[1], self.value[2]) [ 898.748940] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 898.748940] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] raise value [ 898.748940] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 898.749530] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 898.749530] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 898.749530] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] self.reportclient.update_from_provider_tree( [ 898.749530] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 898.749530] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] with catch_all(pd.uuid): [ 898.749530] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 898.749530] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] self.gen.throw(typ, value, traceback) [ 898.749530] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 898.749530] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] raise exception.ResourceProviderSyncFailed() [ 898.749530] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 898.749530] env[69796]: ERROR nova.compute.manager [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] [ 898.749915] env[69796]: DEBUG nova.compute.utils [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 898.751415] env[69796]: DEBUG oslo_concurrency.lockutils [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.881s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.753659] env[69796]: INFO nova.compute.claims [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 898.756880] env[69796]: DEBUG nova.compute.manager [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Build of instance 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 898.762095] env[69796]: DEBUG nova.compute.manager [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 898.762095] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Acquiring lock "refresh_cache-09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.762095] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Acquired lock "refresh_cache-09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.762095] env[69796]: DEBUG nova.network.neutron [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.768798] env[69796]: INFO nova.compute.manager [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 6237eecf-7560-45c1-9fcd-6bd2a0747e7f] Took 1.06 seconds to 
deallocate network for instance. [ 898.784624] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Acquiring lock "6a05bec6-521d-456c-9804-92aa05f38c0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.784796] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Lock "6a05bec6-521d-456c-9804-92aa05f38c0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.159417] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Acquiring lock "029d73b8-bca8-4225-b332-fea194dd3d1e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.159881] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Lock "029d73b8-bca8-4225-b332-fea194dd3d1e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.176337] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.289444] env[69796]: DEBUG nova.network.neutron [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 899.549758] env[69796]: DEBUG nova.network.neutron [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.788171] env[69796]: DEBUG nova.scheduler.client.report [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 899.801744] env[69796]: DEBUG nova.scheduler.client.report [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 899.801956] env[69796]: DEBUG nova.compute.provider_tree [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 899.809714] env[69796]: INFO nova.scheduler.client.report [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Deleted allocations for instance 6237eecf-7560-45c1-9fcd-6bd2a0747e7f [ 899.824022] env[69796]: DEBUG nova.scheduler.client.report [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 899.845583] env[69796]: DEBUG nova.scheduler.client.report [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 900.052750] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Releasing lock "refresh_cache-09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.053013] env[69796]: DEBUG nova.compute.manager [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 900.053203] env[69796]: DEBUG nova.compute.manager [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 900.053370] env[69796]: DEBUG nova.network.neutron [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 900.070535] env[69796]: DEBUG nova.network.neutron [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 900.252860] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c3419ee-ddc7-486b-a6fd-d0b3e2258eda {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.261083] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e248c3b-b988-410e-9d95-a4fdd16f3da5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.296289] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6063455c-bfc1-4e20-a994-cd5d90739204 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.304658] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51674a3c-c732-41ef-8e50-9a5a92c744ae {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.319472] env[69796]: DEBUG nova.compute.provider_tree [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 900.328485] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a973d15e-a1c0-4274-a04d-10189c57e04f tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "6237eecf-7560-45c1-9fcd-6bd2a0747e7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.805s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.572957] env[69796]: DEBUG nova.network.neutron [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.833836] env[69796]: DEBUG nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 900.846154] env[69796]: ERROR nova.scheduler.client.report [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [req-6f6aab14-de52-4867-ab35-20041bb52ef7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-6f6aab14-de52-4867-ab35-20041bb52ef7"}]} [ 900.846154] env[69796]: DEBUG oslo_concurrency.lockutils [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.095s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.846564] env[69796]: ERROR nova.compute.manager [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
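(Editorial note on the traceback that follows, and the two earlier ones: the failure path is the same each time. The resource tracker's _update_to_placement is wrapped by the retrying decorator, and the report client's catch_all context manager converts the per-provider ResourceProviderUpdateFailed into ResourceProviderSyncFailed, which aborts the instance claim and triggers the re-schedule. Below is a minimal sketch of that wrap-and-convert pattern; the names follow the log, but the bodies and the retry policy are illustrative assumptions, not Nova's actual implementation.)

# Minimal sketch of the retry + exception-conversion pattern visible in the
# tracebacks. Bodies and retry settings are illustrative assumptions.
import contextlib
from retrying import retry  # same library as retrying.py in the traceback


class ResourceProviderUpdateFailed(Exception):
    """Raised when a single inventory PUT is rejected (e.g. the 400 above)."""


class ResourceProviderSyncFailed(Exception):
    """Surfaced to the compute manager when the provider could not be synced."""


@contextlib.contextmanager
def catch_all(rp_uuid):
    # Convert a per-provider failure into the generic sync failure, mirroring
    # the catch_all frames in scheduler/client/report.py shown in the traceback.
    try:
        yield
    except ResourceProviderUpdateFailed:
        raise ResourceProviderSyncFailed()


@retry(stop_max_attempt_number=4, wait_fixed=1000)  # hypothetical retry policy
def _update_to_placement(provider_uuid, inventory):
    with catch_all(provider_uuid):
        put_inventory(provider_uuid, inventory)


def put_inventory(provider_uuid, inventory):
    # Stand-in for the real HTTP PUT to /resource_providers/<uuid>/inventories;
    # the 400 response in the log would surface here as the update failure.
    raise ResourceProviderUpdateFailed()

(With this shape, the exception re-raised after the retries are exhausted is the converted ResourceProviderSyncFailed, which is exactly the six.reraise path visible in the frames above.)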
[ 900.846564] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] Traceback (most recent call last): [ 900.846564] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 900.846564] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] yield [ 900.846564] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 900.846564] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] self.set_inventory_for_provider( [ 900.846564] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 900.846564] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 900.846794] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-6f6aab14-de52-4867-ab35-20041bb52ef7"}]} [ 900.846794] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] [ 900.846794] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] During handling of the above exception, another exception occurred: [ 900.846794] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] [ 900.846794] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] Traceback (most recent call last): [ 900.846794] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 900.846794] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] with self.rt.instance_claim(context, instance, node, allocs, [ 900.846794] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 900.846794] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] return f(*args, **kwargs) [ 900.847242] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 900.847242] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] self._update(elevated, cn) [ 900.847242] env[69796]: ERROR nova.compute.manager [instance: 
d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 900.847242] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] self._update_to_placement(context, compute_node, startup) [ 900.847242] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 900.847242] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 900.847242] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 900.847242] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] return attempt.get(self._wrap_exception) [ 900.847242] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 900.847242] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] six.reraise(self.value[0], self.value[1], self.value[2]) [ 900.847242] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 900.847242] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] raise value [ 900.847242] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 900.848020] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 900.848020] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 900.848020] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] self.reportclient.update_from_provider_tree( [ 900.848020] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 900.848020] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] with catch_all(pd.uuid): [ 900.848020] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 900.848020] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] self.gen.throw(typ, value, traceback) [ 900.848020] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 900.848020] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] raise exception.ResourceProviderSyncFailed() [ 900.848020] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 900.848020] env[69796]: ERROR nova.compute.manager [instance: d3002060-482c-4307-845e-5f00b085d06b] [ 900.848428] env[69796]: DEBUG nova.compute.utils [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 900.851098] env[69796]: DEBUG oslo_concurrency.lockutils [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.743s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.852598] env[69796]: INFO nova.compute.claims [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 900.858324] env[69796]: DEBUG nova.compute.manager [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] Build of instance d3002060-482c-4307-845e-5f00b085d06b was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 900.859548] env[69796]: DEBUG nova.compute.manager [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 900.859655] env[69796]: DEBUG oslo_concurrency.lockutils [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Acquiring lock "refresh_cache-d3002060-482c-4307-845e-5f00b085d06b" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.859847] env[69796]: DEBUG oslo_concurrency.lockutils [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Acquired lock "refresh_cache-d3002060-482c-4307-845e-5f00b085d06b" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.860020] env[69796]: DEBUG nova.network.neutron [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 900.981473] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Acquiring lock "d46a5e64-1de4-4f92-b06b-f5367ffea72f" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.981473] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Lock "d46a5e64-1de4-4f92-b06b-f5367ffea72f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.075414] env[69796]: INFO nova.compute.manager [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] [instance: 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a] Took 1.02 seconds to deallocate network for instance. [ 901.359390] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.381261] env[69796]: DEBUG nova.network.neutron [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 901.450265] env[69796]: DEBUG nova.network.neutron [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.514381] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.514672] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.515143] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.515393] env[69796]: 
DEBUG oslo_concurrency.lockutils [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.515583] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.518099] env[69796]: INFO nova.compute.manager [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Terminating instance [ 901.884103] env[69796]: DEBUG nova.scheduler.client.report [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 901.897173] env[69796]: DEBUG nova.scheduler.client.report [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 901.897399] env[69796]: DEBUG nova.compute.provider_tree [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 901.908374] env[69796]: DEBUG nova.scheduler.client.report [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 901.924162] env[69796]: DEBUG nova.scheduler.client.report [None 
req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 901.953766] env[69796]: DEBUG oslo_concurrency.lockutils [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Releasing lock "refresh_cache-d3002060-482c-4307-845e-5f00b085d06b" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.954051] env[69796]: DEBUG nova.compute.manager [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 901.954250] env[69796]: DEBUG nova.compute.manager [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 901.954413] env[69796]: DEBUG nova.network.neutron [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 901.971314] env[69796]: DEBUG nova.network.neutron [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 902.022511] env[69796]: DEBUG nova.compute.manager [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Start destroying the instance on the hypervisor. 
{{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 902.022737] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 902.023781] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da22cfd-1e95-4400-803f-8313dae7484d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.031464] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 902.033664] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f54aa316-671e-4416-bc9b-5b0ff40a27d7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.040628] env[69796]: DEBUG oslo_vmware.api [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ [ 902.040628] env[69796]: value = "task-4234402" [ 902.040628] env[69796]: _type = "Task" [ 902.040628] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.050290] env[69796]: DEBUG oslo_vmware.api [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234402, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.103244] env[69796]: INFO nova.scheduler.client.report [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Deleted allocations for instance 09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a [ 902.252150] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "0002f8b8-91e0-4868-80c7-a70bcd9fc40c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.252330] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "0002f8b8-91e0-4868-80c7-a70bcd9fc40c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.324702] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a5c11d-4edc-4dbe-9aa8-7bd018152eae {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.332999] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47864202-9236-422e-831d-8307c93b8ebc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.363711] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5171d5bf-904f-4145-a98f-184f2eac69b1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.372030] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd19b63d-b4d1-4cec-a26b-66b8c7ed33a0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.387245] env[69796]: DEBUG nova.compute.provider_tree [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 902.474882] env[69796]: DEBUG nova.network.neutron [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.554845] 
env[69796]: DEBUG oslo_vmware.api [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234402, 'name': PowerOffVM_Task, 'duration_secs': 0.190168} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.555117] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 902.555473] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 902.555829] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b501794f-ef38-4313-9b52-aa5c936374eb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.613674] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f1993ca3-0f6a-4435-aaf3-fdd0c76a9a7a tempest-ServerDiagnosticsTest-384181760 tempest-ServerDiagnosticsTest-384181760-project-member] Lock "09cca7a4-b23e-4c42-b5d6-fd6ebf14fb1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.240s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.621370] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 902.621600] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 902.621882] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Deleting the datastore file [datastore2] 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 902.622171] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2457a036-4651-4411-ad99-7e2e5e305d7e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.629523] env[69796]: DEBUG oslo_vmware.api [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for the task: (returnval){ [ 902.629523] env[69796]: value = "task-4234404" [ 902.629523] env[69796]: _type = 
"Task" [ 902.629523] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.638335] env[69796]: DEBUG oslo_vmware.api [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234404, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.917235] env[69796]: ERROR nova.scheduler.client.report [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] [req-62f8b62d-59bc-412d-9d85-b365571f12d0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-62f8b62d-59bc-412d-9d85-b365571f12d0"}]} [ 902.917704] env[69796]: DEBUG oslo_concurrency.lockutils [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.070s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.918348] env[69796]: ERROR nova.compute.manager [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 902.918348] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] Traceback (most recent call last): [ 902.918348] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 902.918348] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] yield [ 902.918348] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 902.918348] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] self.set_inventory_for_provider( [ 902.918348] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 902.918348] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 902.918638] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-62f8b62d-59bc-412d-9d85-b365571f12d0"}]} [ 902.918638] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] [ 902.918638] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] During handling of the above exception, another exception occurred: [ 902.918638] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] [ 902.918638] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] Traceback (most recent call last): [ 902.918638] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 902.918638] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] with self.rt.instance_claim(context, instance, node, allocs, [ 902.918638] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 902.918638] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] return f(*args, **kwargs) [ 902.918903] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 902.918903] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] self._update(elevated, cn) [ 902.918903] env[69796]: ERROR nova.compute.manager [instance: 
0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 902.918903] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] self._update_to_placement(context, compute_node, startup) [ 902.918903] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 902.918903] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 902.918903] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 902.918903] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] return attempt.get(self._wrap_exception) [ 902.918903] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 902.918903] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] six.reraise(self.value[0], self.value[1], self.value[2]) [ 902.918903] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 902.918903] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] raise value [ 902.918903] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 902.919304] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 902.919304] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 902.919304] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] self.reportclient.update_from_provider_tree( [ 902.919304] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 902.919304] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] with catch_all(pd.uuid): [ 902.919304] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 902.919304] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] self.gen.throw(typ, value, traceback) [ 902.919304] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 902.919304] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] raise exception.ResourceProviderSyncFailed() [ 902.919304] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
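The 400 in the traceback above is a plain JSON Schema failure on the inventory PUT: the compute node reports a DISK_GB max_unit of 0, while the max_unit schema quoted verbatim in the error detail requires an integer between 1 and 2147483647. A minimal sketch, assuming nothing beyond the stock jsonschema package and the schema fragment echoed in the response, reproduces the same validation message:

import jsonschema

# The max_unit sub-schema quoted in the 400 response body.
MAX_UNIT_SCHEMA = {"type": "integer", "maximum": 2147483647, "minimum": 1}

# DISK_GB inventory entry as reported above, with the offending max_unit of 0.
disk_gb = {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 0,
           "step_size": 1, "allocation_ratio": 1.0}

try:
    jsonschema.validate(disk_gb["max_unit"], MAX_UNIT_SCHEMA)
except jsonschema.ValidationError as exc:
    print(exc.message)  # "0 is less than the minimum of 1", as in the log

The identical 400 recurs further down whenever the resource tracker pushes an inventory snapshot in which DISK_GB max_unit has dropped to 0.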
[ 902.919304] env[69796]: ERROR nova.compute.manager [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] [ 902.919647] env[69796]: DEBUG nova.compute.utils [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 902.920607] env[69796]: DEBUG oslo_concurrency.lockutils [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.645s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.923413] env[69796]: INFO nova.compute.claims [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 902.925526] env[69796]: DEBUG nova.compute.manager [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] Build of instance 0236eafc-d173-4ccf-ba01-5341c01fb5cd was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 902.926287] env[69796]: DEBUG nova.compute.manager [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 902.926287] env[69796]: DEBUG oslo_concurrency.lockutils [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Acquiring lock "refresh_cache-0236eafc-d173-4ccf-ba01-5341c01fb5cd" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.926818] env[69796]: DEBUG oslo_concurrency.lockutils [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Acquired lock "refresh_cache-0236eafc-d173-4ccf-ba01-5341c01fb5cd" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.926818] env[69796]: DEBUG nova.network.neutron [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 902.978928] env[69796]: INFO nova.compute.manager [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: d3002060-482c-4307-845e-5f00b085d06b] Took 1.02 seconds to 
deallocate network for instance. [ 903.116663] env[69796]: DEBUG nova.compute.manager [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 903.140211] env[69796]: DEBUG oslo_vmware.api [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Task: {'id': task-4234404, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142968} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.140211] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 903.140415] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 903.140623] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 903.140774] env[69796]: INFO nova.compute.manager [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Took 1.12 seconds to destroy the instance on the hypervisor. [ 903.141044] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 903.141242] env[69796]: DEBUG nova.compute.manager [-] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 903.141333] env[69796]: DEBUG nova.network.neutron [-] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 903.452160] env[69796]: DEBUG nova.network.neutron [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 903.527638] env[69796]: DEBUG nova.network.neutron [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.638403] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.671837] env[69796]: DEBUG nova.compute.manager [req-02ae1775-11e3-4c0e-9630-1b981283d6b2 req-07a7fe75-4aeb-4c29-b312-384bdc1783e1 service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Received event network-vif-deleted-3c9189a9-01dd-42e1-b2b3-9d0f3f53448e {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 903.672920] env[69796]: INFO nova.compute.manager [req-02ae1775-11e3-4c0e-9630-1b981283d6b2 req-07a7fe75-4aeb-4c29-b312-384bdc1783e1 service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Neutron deleted interface 3c9189a9-01dd-42e1-b2b3-9d0f3f53448e; detaching it from the instance and deleting it from the info cache [ 903.672920] env[69796]: DEBUG nova.network.neutron [req-02ae1775-11e3-4c0e-9630-1b981283d6b2 req-07a7fe75-4aeb-4c29-b312-384bdc1783e1 service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.951235] env[69796]: DEBUG nova.scheduler.client.report [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 903.965594] env[69796]: DEBUG nova.scheduler.client.report [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 903.965889] env[69796]: DEBUG nova.compute.provider_tree [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 903.981182] env[69796]: DEBUG nova.scheduler.client.report [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 904.001399] env[69796]: DEBUG nova.scheduler.client.report [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 904.011375] env[69796]: INFO nova.scheduler.client.report [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Deleted allocations for instance d3002060-482c-4307-845e-5f00b085d06b [ 904.031777] env[69796]: DEBUG oslo_concurrency.lockutils [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Releasing lock "refresh_cache-0236eafc-d173-4ccf-ba01-5341c01fb5cd" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.031777] env[69796]: DEBUG nova.compute.manager [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 904.031777] env[69796]: DEBUG nova.compute.manager [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] [instance: 0236eafc-d173-4ccf-ba01-5341c01fb5cd] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 904.153963] env[69796]: DEBUG nova.network.neutron [-] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.176792] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c70f797a-8416-481c-b08e-aa2073c52366 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.188785] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c943033-5ce6-4bbb-a5c4-00c8f688c328 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.217849] env[69796]: DEBUG nova.compute.manager [req-02ae1775-11e3-4c0e-9630-1b981283d6b2 req-07a7fe75-4aeb-4c29-b312-384bdc1783e1 service nova] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Detach interface failed, port_id=3c9189a9-01dd-42e1-b2b3-9d0f3f53448e, reason: Instance 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 could not be found. {{(pid=69796) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 904.402024] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ba4f38-dccd-4b49-a259-9f559282035d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.409555] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e2fb52-d741-414d-ac8c-8a571296a17c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.443281] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20e5d291-0901-4735-8594-f02bcfc6cb3b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.451803] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318577d4-7ba1-4393-a46e-1e44020890b3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.468778] env[69796]: DEBUG nova.compute.provider_tree [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 904.525457] env[69796]: DEBUG oslo_concurrency.lockutils [None req-eaca219a-f2fd-4699-ad00-318913bb2a53 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Lock "d3002060-482c-4307-845e-5f00b085d06b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.438s {{(pid=69796) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.656552] env[69796]: INFO nova.compute.manager [-] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Took 1.52 seconds to deallocate network for instance. [ 904.999914] env[69796]: ERROR nova.scheduler.client.report [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [req-075f248c-2b65-4b4c-8abe-028beeae0afd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-075f248c-2b65-4b4c-8abe-028beeae0afd"}]} [ 905.000339] env[69796]: DEBUG oslo_concurrency.lockutils [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.080s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.000924] env[69796]: ERROR nova.compute.manager [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
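The Acquiring lock "compute_resources" / acquired ... waited Ns / "released" ... held Ns triplets interleaved through these records are written by the oslo_concurrency.lockutils wrapper that serializes the resource tracker's claims. A minimal sketch, with a stand-in body rather than Nova's actual instance_claim, shows the kind of decorator whose wrapper logs those acquire/release lines when debug logging is enabled:

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def instance_claim():
    # Stand-in body; while it runs, every other caller guarded by the same lock
    # name blocks, which is where the long "waited 14.645s" values above come from.
    pass

instance_claim()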
[ 905.000924] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Traceback (most recent call last): [ 905.000924] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 905.000924] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] yield [ 905.000924] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 905.000924] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] self.set_inventory_for_provider( [ 905.000924] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 905.000924] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 905.001326] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-075f248c-2b65-4b4c-8abe-028beeae0afd"}]} [ 905.001326] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] [ 905.001326] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] During handling of the above exception, another exception occurred: [ 905.001326] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] [ 905.001326] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Traceback (most recent call last): [ 905.001326] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 905.001326] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] with self.rt.instance_claim(context, instance, node, allocs, [ 905.001326] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 905.001326] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] return f(*args, **kwargs) [ 905.001626] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 905.001626] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] self._update(elevated, cn) [ 905.001626] env[69796]: ERROR nova.compute.manager [instance: 
9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 905.001626] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] self._update_to_placement(context, compute_node, startup) [ 905.001626] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 905.001626] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 905.001626] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 905.001626] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] return attempt.get(self._wrap_exception) [ 905.001626] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 905.001626] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] six.reraise(self.value[0], self.value[1], self.value[2]) [ 905.001626] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 905.001626] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] raise value [ 905.001626] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 905.002117] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 905.002117] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 905.002117] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] self.reportclient.update_from_provider_tree( [ 905.002117] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 905.002117] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] with catch_all(pd.uuid): [ 905.002117] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 905.002117] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] self.gen.throw(typ, value, traceback) [ 905.002117] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 905.002117] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] raise exception.ResourceProviderSyncFailed() [ 905.002117] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
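The retrying.py and six.reraise frames in this traceback show how the failure propagates: _update_to_placement is wrapped by the legacy retrying decorator, and once the wrapper decides not to retry further it re-raises the underlying exception unchanged via six.reraise, which is what turns placement's ResourceProviderUpdateFailed into the ResourceProviderSyncFailed that aborts the build. An illustrative sketch only; the retry parameters are assumptions, and unlike Nova's policy it retries every exception type:

import retrying

@retrying.retry(stop_max_attempt_number=4, wait_fixed=100)
def update_to_placement():
    # Stand-in for reportclient.update_from_provider_tree(); fails the same way
    # the inventory PUT fails in the log.
    raise RuntimeError("ResourceProviderUpdateFailed")

try:
    update_to_placement()
except RuntimeError as exc:
    # The final attempt's exception propagates to the caller via six.reraise.
    print(exc)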
[ 905.002117] env[69796]: ERROR nova.compute.manager [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] [ 905.002620] env[69796]: DEBUG nova.compute.utils [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 905.003707] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.427s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.010295] env[69796]: INFO nova.compute.claims [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.013809] env[69796]: DEBUG nova.compute.manager [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Build of instance 9c91466a-5057-4dbf-ad3b-c84120d8435a was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 905.014457] env[69796]: DEBUG nova.compute.manager [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 905.014700] env[69796]: DEBUG oslo_concurrency.lockutils [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Acquiring lock "refresh_cache-9c91466a-5057-4dbf-ad3b-c84120d8435a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.014882] env[69796]: DEBUG oslo_concurrency.lockutils [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Acquired lock "refresh_cache-9c91466a-5057-4dbf-ad3b-c84120d8435a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.014997] env[69796]: DEBUG nova.network.neutron [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 905.028980] env[69796]: DEBUG nova.compute.manager [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 
tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 905.068151] env[69796]: INFO nova.scheduler.client.report [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Deleted allocations for instance 0236eafc-d173-4ccf-ba01-5341c01fb5cd [ 905.165365] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.541460] env[69796]: DEBUG nova.network.neutron [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 905.551590] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.579833] env[69796]: DEBUG oslo_concurrency.lockutils [None req-795e052c-c1aa-4e57-9f15-a4ef1575c42a tempest-ServerShowV257Test-124528321 tempest-ServerShowV257Test-124528321-project-member] Lock "0236eafc-d173-4ccf-ba01-5341c01fb5cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.305s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.660954] env[69796]: DEBUG nova.network.neutron [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.049555] env[69796]: DEBUG nova.scheduler.client.report [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 906.071620] env[69796]: DEBUG nova.scheduler.client.report [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 906.071840] env[69796]: DEBUG nova.compute.provider_tree [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 906.084652] env[69796]: DEBUG nova.compute.manager [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 906.088372] env[69796]: DEBUG nova.scheduler.client.report [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 906.115393] env[69796]: DEBUG nova.scheduler.client.report [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 906.164709] env[69796]: DEBUG oslo_concurrency.lockutils [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Releasing lock "refresh_cache-9c91466a-5057-4dbf-ad3b-c84120d8435a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.164956] env[69796]: DEBUG nova.compute.manager [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 906.165164] env[69796]: DEBUG nova.compute.manager [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 906.165340] env[69796]: DEBUG nova.network.neutron [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.183322] env[69796]: DEBUG nova.network.neutron [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 906.597135] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceaf6029-d4aa-42b6-844d-41bd08e6d7b0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.609064] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3bfa7e-affe-4f22-a9cc-318722f39845 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.644744] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.645134] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b039d63f-8925-437f-adbd-03fb0231e18c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.655077] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6a2efc-9f9e-4190-abb8-5a245202d071 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.674831] env[69796]: DEBUG nova.compute.provider_tree [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 906.686200] env[69796]: DEBUG nova.network.neutron [None req-02c7fd19-82fd-4558-b7c8-2618e335334f 
tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.188331] env[69796]: INFO nova.compute.manager [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] [instance: 9c91466a-5057-4dbf-ad3b-c84120d8435a] Took 1.02 seconds to deallocate network for instance. [ 907.209653] env[69796]: ERROR nova.scheduler.client.report [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [req-4ce7ccf9-b447-4510-b0be-0d75502f8828] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4ce7ccf9-b447-4510-b0be-0d75502f8828"}]} [ 907.210069] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.210707] env[69796]: ERROR nova.compute.manager [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 907.210707] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Traceback (most recent call last): [ 907.210707] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 907.210707] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] yield [ 907.210707] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 907.210707] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] self.set_inventory_for_provider( [ 907.210707] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 907.210707] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 907.211119] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4ce7ccf9-b447-4510-b0be-0d75502f8828"}]} [ 907.211119] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] [ 907.211119] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] During handling of the above exception, another exception occurred: [ 907.211119] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] [ 907.211119] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Traceback (most recent call last): [ 907.211119] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 907.211119] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] with self.rt.instance_claim(context, instance, node, allocs, [ 907.211119] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 907.211119] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] return f(*args, **kwargs) [ 907.211538] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 907.211538] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] self._update(elevated, cn) [ 907.211538] env[69796]: ERROR nova.compute.manager [instance: 
af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 907.211538] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] self._update_to_placement(context, compute_node, startup) [ 907.211538] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 907.211538] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 907.211538] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 907.211538] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] return attempt.get(self._wrap_exception) [ 907.211538] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 907.211538] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] six.reraise(self.value[0], self.value[1], self.value[2]) [ 907.211538] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 907.211538] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] raise value [ 907.211538] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 907.212132] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 907.212132] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 907.212132] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] self.reportclient.update_from_provider_tree( [ 907.212132] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 907.212132] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] with catch_all(pd.uuid): [ 907.212132] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 907.212132] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] self.gen.throw(typ, value, traceback) [ 907.212132] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 907.212132] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] raise exception.ResourceProviderSyncFailed() [ 907.212132] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
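Editor's note: the 400 in the traceback above is Placement's JSON-Schema validation of the inventory payload; the compute host reports DISK_GB with max_unit 0, while the schema fragment quoted in the error detail requires max_unit to be at least 1. The following is a minimal sketch that reproduces the same validation failure locally, assuming the jsonschema library; it models only the max_unit constraint and is not Placement's actual handler.

    # Minimal sketch (not Nova/Placement code): validate the DISK_GB record
    # against the schema fragment quoted verbatim in the 400 response above.
    import jsonschema

    # Schema fragment as quoted by Placement in the error detail.
    inventory_schema = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {
                                "type": "integer",
                                "minimum": 1,
                                "maximum": 2147483647,
                            },
                        },
                    },
                },
            },
        },
    }

    # The DISK_GB inventory the resource tracker tried to PUT (max_unit == 0).
    payload = {
        "inventories": {
            "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                        "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0},
        },
    }

    try:
        jsonschema.validate(payload, inventory_schema)
    except jsonschema.ValidationError as exc:
        # Prints: 0 is less than the minimum of 1
        print(exc.message)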
[ 907.212132] env[69796]: ERROR nova.compute.manager [instance: af3f3f51-5368-43b7-b69b-44fe28004777] [ 907.212898] env[69796]: DEBUG nova.compute.utils [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 907.214631] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.614s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.216342] env[69796]: INFO nova.compute.claims [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.220132] env[69796]: DEBUG nova.compute.manager [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Build of instance af3f3f51-5368-43b7-b69b-44fe28004777 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 907.220689] env[69796]: DEBUG nova.compute.manager [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 907.220937] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquiring lock "refresh_cache-af3f3f51-5368-43b7-b69b-44fe28004777" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.221235] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquired lock "refresh_cache-af3f3f51-5368-43b7-b69b-44fe28004777" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.221402] env[69796]: DEBUG nova.network.neutron [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 907.755400] env[69796]: DEBUG nova.network.neutron [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 907.811013] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Acquiring lock "5fa540ab-5cc9-4240-8c0d-3c92743d152f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.811282] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Lock "5fa540ab-5cc9-4240-8c0d-3c92743d152f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.855022] env[69796]: DEBUG nova.network.neutron [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.238177] env[69796]: INFO nova.scheduler.client.report [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Deleted allocations for instance 9c91466a-5057-4dbf-ad3b-c84120d8435a [ 908.264809] env[69796]: DEBUG nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 908.283147] env[69796]: DEBUG nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 908.283451] env[69796]: DEBUG nova.compute.provider_tree [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 908.297259] env[69796]: DEBUG nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 908.320145] env[69796]: DEBUG nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 908.358169] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Releasing lock "refresh_cache-af3f3f51-5368-43b7-b69b-44fe28004777" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.358394] env[69796]: DEBUG nova.compute.manager [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 908.358509] env[69796]: DEBUG nova.compute.manager [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 908.358680] env[69796]: DEBUG nova.network.neutron [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.379724] env[69796]: DEBUG nova.network.neutron [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 908.673300] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e114ecee-8d78-4f26-8a7e-7b4e2d89fe9f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.682496] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e11a96-ec64-41d1-92f0-1c137fc78b03 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.722264] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e19b31-0726-4f75-9f1b-1970c06b92f9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.731225] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782431b3-1a5b-4375-9538-c419699a0633 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.746426] env[69796]: DEBUG nova.compute.provider_tree [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 908.752235] env[69796]: DEBUG oslo_concurrency.lockutils [None req-02c7fd19-82fd-4558-b7c8-2618e335334f tempest-ServerMetadataNegativeTestJSON-2130934007 tempest-ServerMetadataNegativeTestJSON-2130934007-project-member] Lock "9c91466a-5057-4dbf-ad3b-c84120d8435a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.596s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.884058] env[69796]: DEBUG nova.network.neutron [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.256057] env[69796]: DEBUG nova.compute.manager [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 909.272847] env[69796]: ERROR nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [req-da449a12-6735-447a-bf14-41efa28aa2ee] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-da449a12-6735-447a-bf14-41efa28aa2ee"}]} [ 909.273308] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.059s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.273914] env[69796]: ERROR nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 909.273914] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Traceback (most recent call last): [ 909.273914] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 909.273914] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] yield [ 909.273914] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 909.273914] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] self.set_inventory_for_provider( [ 909.273914] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 909.273914] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 909.274293] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-da449a12-6735-447a-bf14-41efa28aa2ee"}]} [ 909.274293] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] [ 909.274293] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] During handling of the above exception, another exception occurred: [ 909.274293] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] [ 909.274293] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Traceback (most recent call last): [ 909.274293] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 909.274293] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] with self.rt.instance_claim(context, instance, node, allocs, [ 909.274293] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 909.274293] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] return f(*args, **kwargs) [ 909.274625] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 909.274625] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] self._update(elevated, cn) [ 909.274625] env[69796]: ERROR nova.compute.manager [instance: 
6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 909.274625] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] self._update_to_placement(context, compute_node, startup) [ 909.274625] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 909.274625] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 909.274625] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 909.274625] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] return attempt.get(self._wrap_exception) [ 909.274625] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 909.274625] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] six.reraise(self.value[0], self.value[1], self.value[2]) [ 909.274625] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 909.274625] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] raise value [ 909.274625] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 909.275069] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 909.275069] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 909.275069] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] self.reportclient.update_from_provider_tree( [ 909.275069] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 909.275069] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] with catch_all(pd.uuid): [ 909.275069] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 909.275069] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] self.gen.throw(typ, value, traceback) [ 909.275069] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 909.275069] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] raise exception.ResourceProviderSyncFailed() [ 909.275069] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
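Editor's note: the same failure repeats for instance 6b1e871c; each refresh pulls DISK_GB max_unit 1 back from Placement, the local ProviderTree update overwrites it with 0, and the PUT fails again, so every instance claim on this node is re-scheduled. Purely as an illustration (a hypothetical helper, not Nova's resource tracker or the actual fix), a pre-flight check over the inventory dict logged by update_inventory would flag the bad record before it reaches Placement.

    # Hypothetical pre-flight check (not part of Nova): scan an inventory dict
    # of the shape logged by update_inventory above and report records that
    # Placement will reject (min_unit, max_unit and step_size must be >= 1).
    def find_invalid_inventory(inventories):
        problems = {}
        for rc, inv in inventories.items():
            for field in ("min_unit", "max_unit", "step_size"):
                if inv.get(field, 1) < 1:
                    problems[rc] = "%s=%s is below the minimum of 1" % (
                        field, inv[field])
        return problems

    inventories = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 0,
                    "step_size": 1, "allocation_ratio": 1.0},
    }

    # Prints: {'DISK_GB': 'max_unit=0 is below the minimum of 1'}
    print(find_invalid_inventory(inventories))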
[ 909.275069] env[69796]: ERROR nova.compute.manager [instance: 6b1e871c-0971-4c37-a852-14fea283c815] [ 909.275423] env[69796]: DEBUG nova.compute.utils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 909.275777] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.517s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.277389] env[69796]: INFO nova.compute.claims [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.281257] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Build of instance 6b1e871c-0971-4c37-a852-14fea283c815 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 909.281740] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 909.281987] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Acquiring lock "refresh_cache-6b1e871c-0971-4c37-a852-14fea283c815" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.282154] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Acquired lock "refresh_cache-6b1e871c-0971-4c37-a852-14fea283c815" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.282318] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 909.389403] env[69796]: INFO nova.compute.manager [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 
tempest-ImagesTestJSON-1840668238-project-member] [instance: af3f3f51-5368-43b7-b69b-44fe28004777] Took 1.03 seconds to deallocate network for instance. [ 909.781227] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.815948] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 909.894620] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.312693] env[69796]: DEBUG nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 910.336167] env[69796]: DEBUG nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 910.336415] env[69796]: DEBUG nova.compute.provider_tree [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 910.350224] env[69796]: DEBUG nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Refreshing aggregate associations for resource provider 
dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 910.371861] env[69796]: DEBUG nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 910.400008] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Releasing lock "refresh_cache-6b1e871c-0971-4c37-a852-14fea283c815" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.400008] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 910.400104] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 910.400259] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 910.422436] env[69796]: INFO nova.scheduler.client.report [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Deleted allocations for instance af3f3f51-5368-43b7-b69b-44fe28004777 [ 910.429157] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 910.710260] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac7096d-abd5-4ddd-8baf-44184c4bbc07 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.719066] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf558f1-e302-43b6-b95f-3822e8ece402 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.754509] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cae653e-21c4-4aea-8f52-eff0f489d96d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.763083] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99e8d3e-c98d-4a1a-a649-ad7d51696861 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.777491] env[69796]: DEBUG nova.compute.provider_tree [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 910.936205] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.936205] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c9d732ff-0824-42a2-b1b5-d8ec615aa223 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "af3f3f51-5368-43b7-b69b-44fe28004777" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.437s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.302948] env[69796]: ERROR nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [req-7331e7c0-e2b3-446b-9832-20e6e60695b0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7331e7c0-e2b3-446b-9832-20e6e60695b0"}]} [ 911.303588] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.028s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.304502] env[69796]: ERROR nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 911.304502] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Traceback (most recent call last): [ 911.304502] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 911.304502] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] yield [ 911.304502] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 911.304502] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] self.set_inventory_for_provider( [ 911.304502] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 911.304502] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 911.304758] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-7331e7c0-e2b3-446b-9832-20e6e60695b0"}]} [ 911.304758] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] [ 911.304758] env[69796]: ERROR 
nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] During handling of the above exception, another exception occurred: [ 911.304758] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] [ 911.304758] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Traceback (most recent call last): [ 911.304758] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 911.304758] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] with self.rt.instance_claim(context, instance, node, allocs, [ 911.304758] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 911.304758] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] return f(*args, **kwargs) [ 911.305054] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 911.305054] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] self._update(elevated, cn) [ 911.305054] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 911.305054] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] self._update_to_placement(context, compute_node, startup) [ 911.305054] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 911.305054] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 911.305054] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 911.305054] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] return attempt.get(self._wrap_exception) [ 911.305054] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 911.305054] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] six.reraise(self.value[0], self.value[1], self.value[2]) [ 911.305054] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 911.305054] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] raise value [ 911.305054] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 911.305413] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 911.305413] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 911.305413] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] self.reportclient.update_from_provider_tree( [ 911.305413] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 911.305413] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] with catch_all(pd.uuid): [ 911.305413] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 911.305413] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] self.gen.throw(typ, value, traceback) [ 911.305413] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 911.305413] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] raise exception.ResourceProviderSyncFailed() [ 911.305413] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 911.305413] env[69796]: ERROR nova.compute.manager [instance: fd090570-97f0-4afc-a512-eb4be373c51c] [ 911.306276] env[69796]: DEBUG nova.compute.utils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 911.307784] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.391s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.309782] env[69796]: INFO nova.compute.claims [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 911.313098] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Build of instance fd090570-97f0-4afc-a512-eb4be373c51c was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 911.314106] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 911.314106] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Acquiring lock "refresh_cache-fd090570-97f0-4afc-a512-eb4be373c51c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.314106] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Acquired lock "refresh_cache-fd090570-97f0-4afc-a512-eb4be373c51c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.314265] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 911.439501] env[69796]: INFO nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: 6b1e871c-0971-4c37-a852-14fea283c815] Took 1.04 seconds to deallocate network for instance. [ 911.440675] env[69796]: DEBUG nova.compute.manager [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 911.842958] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 911.962605] env[69796]: DEBUG oslo_concurrency.lockutils [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.979584] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.345724] env[69796]: DEBUG nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 912.353006] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquiring lock "c5c38995-c5b4-457e-badf-7f2eabb203f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.353309] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "c5c38995-c5b4-457e-badf-7f2eabb203f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.361706] env[69796]: DEBUG nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 912.361933] env[69796]: DEBUG nova.compute.provider_tree [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 912.373120] env[69796]: DEBUG nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 912.401990] env[69796]: DEBUG nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 912.474544] env[69796]: INFO nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Deleted allocations for instance 6b1e871c-0971-4c37-a852-14fea283c815 [ 912.484198] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Releasing lock "refresh_cache-fd090570-97f0-4afc-a512-eb4be373c51c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.484327] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 912.484581] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 912.484670] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 912.504139] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 912.769190] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349dd4cb-8840-4401-8b0b-0c7207334cea {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.777750] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58aa757-7f4b-4da5-bd92-44d37537772a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.810938] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81955eec-a957-4056-bac6-bf8d24f69364 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.818463] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8883b1b4-120c-4da9-8850-cafe3ad9cf07 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.832665] env[69796]: DEBUG nova.compute.provider_tree [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 912.987681] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Lock "6b1e871c-0971-4c37-a852-14fea283c815" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.399s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.006783] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.362140] env[69796]: ERROR nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [req-f94515b2-2465-4ff4-a02f-246916cd488d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f94515b2-2465-4ff4-a02f-246916cd488d"}]} [ 913.362777] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.055s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.363704] env[69796]: ERROR nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 913.363704] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Traceback (most recent call last): [ 913.363704] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 913.363704] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] yield [ 913.363704] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 913.363704] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] self.set_inventory_for_provider( [ 913.363704] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 913.363704] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 913.364121] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-f94515b2-2465-4ff4-a02f-246916cd488d"}]} [ 913.364121] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] [ 913.364121] env[69796]: ERROR 
nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] During handling of the above exception, another exception occurred: [ 913.364121] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] [ 913.364121] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Traceback (most recent call last): [ 913.364121] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 913.364121] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] with self.rt.instance_claim(context, instance, node, allocs, [ 913.364121] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 913.364121] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] return f(*args, **kwargs) [ 913.364427] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 913.364427] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] self._update(elevated, cn) [ 913.364427] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 913.364427] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] self._update_to_placement(context, compute_node, startup) [ 913.364427] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 913.364427] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 913.364427] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 913.364427] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] return attempt.get(self._wrap_exception) [ 913.364427] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 913.364427] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] six.reraise(self.value[0], self.value[1], self.value[2]) [ 913.364427] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 913.364427] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] raise value [ 913.364427] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 913.364801] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 913.364801] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 913.364801] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] self.reportclient.update_from_provider_tree( [ 913.364801] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 913.364801] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] with catch_all(pd.uuid): [ 913.364801] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 913.364801] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] self.gen.throw(typ, value, traceback) [ 913.364801] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 913.364801] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] raise exception.ResourceProviderSyncFailed() [ 913.364801] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 913.364801] env[69796]: ERROR nova.compute.manager [instance: da71a468-a227-493a-b4d3-d92b7626b18c] [ 913.365128] env[69796]: DEBUG nova.compute.utils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 913.365305] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.189s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.366734] env[69796]: INFO nova.compute.claims [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.373020] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Build of instance da71a468-a227-493a-b4d3-d92b7626b18c was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 913.373020] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 913.373020] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Acquiring lock "refresh_cache-da71a468-a227-493a-b4d3-d92b7626b18c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.373020] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Acquired lock "refresh_cache-da71a468-a227-493a-b4d3-d92b7626b18c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.373319] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 913.490216] env[69796]: DEBUG nova.compute.manager [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 913.511335] env[69796]: INFO nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: fd090570-97f0-4afc-a512-eb4be373c51c] Took 1.02 seconds to deallocate network for instance. [ 913.894912] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 913.971121] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.011167] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.172490] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Acquiring lock "815e394b-8c3e-4628-a2bf-2933e4cc960c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.172574] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Lock "815e394b-8c3e-4628-a2bf-2933e4cc960c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.398878] env[69796]: DEBUG nova.scheduler.client.report [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 914.413328] env[69796]: DEBUG nova.scheduler.client.report [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 914.413625] env[69796]: DEBUG nova.compute.provider_tree [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 914.426623] env[69796]: DEBUG nova.scheduler.client.report [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 914.447415] env[69796]: DEBUG nova.scheduler.client.report [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 914.474190] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Releasing lock "refresh_cache-da71a468-a227-493a-b4d3-d92b7626b18c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.474431] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 914.474617] env[69796]: DEBUG nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 914.474790] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 914.495185] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 914.546380] env[69796]: INFO nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Deleted allocations for instance fd090570-97f0-4afc-a512-eb4be373c51c [ 914.821477] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b01163c-c06c-4af8-9495-64a0613d78cf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.829824] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e0f73b-c53d-4d4b-aaa5-f505f354e57c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.860792] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c24137-f76f-4eec-9e02-5fcf6bee14df {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.869085] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5044a38-54e9-47a1-80d2-b242779552c2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.883466] env[69796]: DEBUG nova.compute.provider_tree [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 914.997494] env[69796]: DEBUG nova.network.neutron [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.060854] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Lock "fd090570-97f0-4afc-a512-eb4be373c51c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.417s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.403441] env[69796]: ERROR nova.scheduler.client.report [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [req-18dd8c80-dc75-4993-821a-851742ffdc09] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-18dd8c80-dc75-4993-821a-851742ffdc09"}]} [ 915.403878] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.039s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.404435] env[69796]: ERROR nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 915.404435] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Traceback (most recent call last): [ 915.404435] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 915.404435] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] yield [ 915.404435] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 915.404435] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] self.set_inventory_for_provider( [ 915.404435] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 915.404435] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 915.405143] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": 
"placement.undefined_code", "request_id": "req-18dd8c80-dc75-4993-821a-851742ffdc09"}]} [ 915.405143] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] [ 915.405143] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] During handling of the above exception, another exception occurred: [ 915.405143] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] [ 915.405143] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Traceback (most recent call last): [ 915.405143] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 915.405143] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] with self.rt.instance_claim(context, instance, node, allocs, [ 915.405143] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 915.405143] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] return f(*args, **kwargs) [ 915.405721] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 915.405721] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] self._update(elevated, cn) [ 915.405721] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 915.405721] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] self._update_to_placement(context, compute_node, startup) [ 915.405721] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 915.405721] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 915.405721] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 915.405721] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] return attempt.get(self._wrap_exception) [ 915.405721] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 915.405721] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] six.reraise(self.value[0], self.value[1], self.value[2]) [ 915.405721] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 915.405721] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] raise value [ 915.405721] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 915.406463] env[69796]: ERROR nova.compute.manager [instance: 
6777b20d-7cfe-44a0-aaff-c58318ad88e8] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 915.406463] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 915.406463] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] self.reportclient.update_from_provider_tree( [ 915.406463] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 915.406463] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] with catch_all(pd.uuid): [ 915.406463] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 915.406463] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] self.gen.throw(typ, value, traceback) [ 915.406463] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 915.406463] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] raise exception.ResourceProviderSyncFailed() [ 915.406463] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 915.406463] env[69796]: ERROR nova.compute.manager [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] [ 915.407104] env[69796]: DEBUG nova.compute.utils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 915.407104] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.047s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.407814] env[69796]: INFO nova.compute.claims [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 915.410446] env[69796]: DEBUG nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Build of instance 6777b20d-7cfe-44a0-aaff-c58318ad88e8 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 915.410856] env[69796]: DEBUG nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 915.411086] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquiring lock "refresh_cache-6777b20d-7cfe-44a0-aaff-c58318ad88e8" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.411235] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquired lock "refresh_cache-6777b20d-7cfe-44a0-aaff-c58318ad88e8" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.411392] env[69796]: DEBUG nova.network.neutron [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 915.503662] env[69796]: INFO nova.compute.manager [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] [instance: da71a468-a227-493a-b4d3-d92b7626b18c] Took 1.03 seconds to deallocate network for instance. [ 915.563932] env[69796]: DEBUG nova.compute.manager [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 915.932162] env[69796]: DEBUG nova.network.neutron [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.014338] env[69796]: DEBUG nova.network.neutron [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.084915] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.434882] env[69796]: DEBUG nova.scheduler.client.report [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 916.449423] env[69796]: DEBUG nova.scheduler.client.report [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 916.449668] env[69796]: DEBUG nova.compute.provider_tree [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 916.461637] env[69796]: DEBUG nova.scheduler.client.report [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 916.481146] env[69796]: DEBUG nova.scheduler.client.report [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: 
HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 916.517642] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Releasing lock "refresh_cache-6777b20d-7cfe-44a0-aaff-c58318ad88e8" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.517882] env[69796]: DEBUG nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 916.518119] env[69796]: DEBUG nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 916.518272] env[69796]: DEBUG nova.network.neutron [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.532736] env[69796]: INFO nova.scheduler.client.report [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Deleted allocations for instance da71a468-a227-493a-b4d3-d92b7626b18c [ 916.538655] env[69796]: DEBUG nova.network.neutron [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 916.779364] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e529c3b-c157-47d4-bff3-dcc16fb134f4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.787946] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27413ad4-af0d-4c7e-b024-1cb6847bc19f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.818667] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14f4bed-6444-447e-b818-499d9d08a734 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.827411] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2068fac-a25d-4e8c-8829-f1ee33f75f78 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.844058] env[69796]: DEBUG nova.compute.provider_tree [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 917.043556] env[69796]: DEBUG nova.network.neutron [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.044932] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f51d653a-029d-46cd-ae66-2132a6ef8bab tempest-ListServersNegativeTestJSON-220279662 tempest-ListServersNegativeTestJSON-220279662-project-member] Lock "da71a468-a227-493a-b4d3-d92b7626b18c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.369s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.368967] env[69796]: ERROR nova.scheduler.client.report [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [req-ddbbdb93-d986-4fe7-9276-b276c9df63e3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-ddbbdb93-d986-4fe7-9276-b276c9df63e3"}]} [ 917.369417] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.963s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.370411] env[69796]: ERROR nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 917.370411] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Traceback (most recent call last): [ 917.370411] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 917.370411] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] yield [ 917.370411] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 917.370411] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] self.set_inventory_for_provider( [ 917.370411] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 917.370411] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 917.370696] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-ddbbdb93-d986-4fe7-9276-b276c9df63e3"}]} [ 917.370696] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] [ 917.370696] env[69796]: ERROR nova.compute.manager [instance: 
951b3b77-765e-41c8-866e-b0bb4bd45559] During handling of the above exception, another exception occurred: [ 917.370696] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] [ 917.370696] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Traceback (most recent call last): [ 917.370696] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 917.370696] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] with self.rt.instance_claim(context, instance, node, allocs, [ 917.370696] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 917.370696] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] return f(*args, **kwargs) [ 917.371064] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 917.371064] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] self._update(elevated, cn) [ 917.371064] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 917.371064] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] self._update_to_placement(context, compute_node, startup) [ 917.371064] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 917.371064] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 917.371064] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 917.371064] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] return attempt.get(self._wrap_exception) [ 917.371064] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 917.371064] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] six.reraise(self.value[0], self.value[1], self.value[2]) [ 917.371064] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 917.371064] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] raise value [ 917.371064] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 917.371483] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 917.371483] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 917.371483] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] self.reportclient.update_from_provider_tree( [ 917.371483] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 917.371483] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] with catch_all(pd.uuid): [ 917.371483] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 917.371483] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] self.gen.throw(typ, value, traceback) [ 917.371483] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 917.371483] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] raise exception.ResourceProviderSyncFailed() [ 917.371483] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 917.371483] env[69796]: ERROR nova.compute.manager [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] [ 917.371834] env[69796]: DEBUG nova.compute.utils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 917.372755] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.735s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.374335] env[69796]: INFO nova.compute.claims [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 917.377308] env[69796]: DEBUG nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Build of instance 951b3b77-765e-41c8-866e-b0bb4bd45559 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 917.377708] env[69796]: DEBUG nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 917.377963] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquiring lock "refresh_cache-951b3b77-765e-41c8-866e-b0bb4bd45559" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.378130] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Acquired lock "refresh_cache-951b3b77-765e-41c8-866e-b0bb4bd45559" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.378296] env[69796]: DEBUG nova.network.neutron [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 917.547817] env[69796]: INFO nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 6777b20d-7cfe-44a0-aaff-c58318ad88e8] Took 1.03 seconds to deallocate network for instance. [ 917.550907] env[69796]: DEBUG nova.compute.manager [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 917.899695] env[69796]: DEBUG nova.network.neutron [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 917.972527] env[69796]: DEBUG nova.network.neutron [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.077138] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.403040] env[69796]: DEBUG nova.scheduler.client.report [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 918.475785] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Releasing lock "refresh_cache-951b3b77-765e-41c8-866e-b0bb4bd45559" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.476337] env[69796]: DEBUG nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 918.476337] env[69796]: DEBUG nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 918.476546] env[69796]: DEBUG nova.network.neutron [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 918.495280] env[69796]: DEBUG nova.network.neutron [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 918.499024] env[69796]: DEBUG nova.scheduler.client.report [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 918.499260] env[69796]: DEBUG nova.compute.provider_tree [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 918.511399] env[69796]: DEBUG nova.scheduler.client.report [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 918.528984] env[69796]: DEBUG nova.scheduler.client.report [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 918.577650] env[69796]: INFO nova.scheduler.client.report [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Deleted allocations for instance 6777b20d-7cfe-44a0-aaff-c58318ad88e8 [ 918.880504] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e08123-8c22-419b-89c2-c0a45d5beeac {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.889625] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8441c4f2-1477-47a0-8a2d-e8746265ea9d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.921905] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17a1582-d6a7-41af-a4a7-a9f75cd64782 {{(pid=69796) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.933969] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5725c4bc-fd3e-4952-ad0d-3767a9bafffc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.946896] env[69796]: DEBUG nova.compute.provider_tree [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 918.998068] env[69796]: DEBUG nova.network.neutron [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.087974] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "6777b20d-7cfe-44a0-aaff-c58318ad88e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.482s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.476438] env[69796]: ERROR nova.scheduler.client.report [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [req-2d056894-05fa-4b38-8583-cc63db28bf40] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2d056894-05fa-4b38-8583-cc63db28bf40"}]} [ 919.477016] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.104s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.477919] env[69796]: ERROR nova.compute.manager [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 919.477919] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Traceback (most recent call last): [ 919.477919] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 919.477919] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] yield [ 919.477919] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 919.477919] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] self.set_inventory_for_provider( [ 919.477919] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 919.477919] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 919.478234] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2d056894-05fa-4b38-8583-cc63db28bf40"}]} [ 919.478234] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] [ 919.478234] env[69796]: ERROR nova.compute.manager [instance: 
428c79e1-3a68-444b-8a4c-5ed2184539b6] During handling of the above exception, another exception occurred: [ 919.478234] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] [ 919.478234] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Traceback (most recent call last): [ 919.478234] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 919.478234] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] with self.rt.instance_claim(context, instance, node, allocs, [ 919.478234] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 919.478234] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] return f(*args, **kwargs) [ 919.478553] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 919.478553] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] self._update(elevated, cn) [ 919.478553] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 919.478553] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] self._update_to_placement(context, compute_node, startup) [ 919.478553] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 919.478553] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 919.478553] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 919.478553] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] return attempt.get(self._wrap_exception) [ 919.478553] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 919.478553] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] six.reraise(self.value[0], self.value[1], self.value[2]) [ 919.478553] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 919.478553] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] raise value [ 919.478553] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 919.478967] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 919.478967] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 919.478967] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] self.reportclient.update_from_provider_tree( [ 919.478967] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 919.478967] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] with catch_all(pd.uuid): [ 919.478967] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 919.478967] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] self.gen.throw(typ, value, traceback) [ 919.478967] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 919.478967] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] raise exception.ResourceProviderSyncFailed() [ 919.478967] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 919.478967] env[69796]: ERROR nova.compute.manager [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] [ 919.479323] env[69796]: DEBUG nova.compute.utils [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 919.480741] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.315s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.481092] env[69796]: DEBUG nova.objects.instance [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lazy-loading 'resources' on Instance uuid 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.484029] env[69796]: DEBUG nova.compute.manager [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Build of instance 428c79e1-3a68-444b-8a4c-5ed2184539b6 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 919.484029] env[69796]: DEBUG nova.compute.manager [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 919.484219] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Acquiring lock "refresh_cache-428c79e1-3a68-444b-8a4c-5ed2184539b6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.484268] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Acquired lock "refresh_cache-428c79e1-3a68-444b-8a4c-5ed2184539b6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.484398] env[69796]: DEBUG nova.network.neutron [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 919.502452] env[69796]: INFO nova.compute.manager [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] [instance: 951b3b77-765e-41c8-866e-b0bb4bd45559] Took 1.03 seconds to deallocate network for instance. [ 919.592049] env[69796]: DEBUG nova.compute.manager [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 920.011951] env[69796]: DEBUG nova.network.neutron [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 920.014764] env[69796]: DEBUG nova.scheduler.client.report [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 920.045034] env[69796]: DEBUG nova.scheduler.client.report [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 920.045034] env[69796]: DEBUG nova.compute.provider_tree [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 920.066278] env[69796]: DEBUG nova.scheduler.client.report [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 920.090579] env[69796]: DEBUG nova.scheduler.client.report [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 920.120575] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.122215] env[69796]: DEBUG nova.network.neutron [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] 
[instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.532675] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c52d59f-72e5-4a59-996a-b4d9bf8bf591 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.538391] env[69796]: INFO nova.scheduler.client.report [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Deleted allocations for instance 951b3b77-765e-41c8-866e-b0bb4bd45559 [ 920.552264] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dcda0fb-a35a-416e-9eee-0647a728dcf8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.595264] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e673ae-d284-4282-ac4e-d756de1c48e3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.604893] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd3a2d8-daf9-4c7f-a328-66565e0b990d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.621012] env[69796]: DEBUG nova.compute.provider_tree [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 920.626502] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Releasing lock "refresh_cache-428c79e1-3a68-444b-8a4c-5ed2184539b6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.626766] env[69796]: DEBUG nova.compute.manager [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 920.626937] env[69796]: DEBUG nova.compute.manager [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 920.627207] env[69796]: DEBUG nova.network.neutron [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 920.644474] env[69796]: DEBUG nova.network.neutron [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 921.049661] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b256afcb-72cd-472a-82be-95788de884e9 tempest-MultipleCreateTestJSON-1417547446 tempest-MultipleCreateTestJSON-1417547446-project-member] Lock "951b3b77-765e-41c8-866e-b0bb4bd45559" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.414s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.147266] env[69796]: DEBUG nova.network.neutron [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.149050] env[69796]: ERROR nova.scheduler.client.report [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [req-4d92aa08-a82a-4ce5-bc70-74b9307d60ab] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4d92aa08-a82a-4ce5-bc70-74b9307d60ab"}]} [ 921.149354] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.669s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.151026] env[69796]: ERROR nova.compute.manager [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 921.151026] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Traceback (most recent call last): [ 921.151026] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 921.151026] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] yield [ 921.151026] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 921.151026] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] self.set_inventory_for_provider( [ 921.151026] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 921.151026] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 921.151407] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4d92aa08-a82a-4ce5-bc70-74b9307d60ab"}]} [ 921.151407] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] [ 921.151407] env[69796]: ERROR nova.compute.manager 
[instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] During handling of the above exception, another exception occurred: [ 921.151407] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] [ 921.151407] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Traceback (most recent call last): [ 921.151407] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 921.151407] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] self._delete_instance(context, instance, bdms) [ 921.151407] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 921.151407] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] self._complete_deletion(context, instance) [ 921.151733] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 921.151733] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] self._update_resource_tracker(context, instance) [ 921.151733] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 921.151733] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] self.rt.update_usage(context, instance, instance.node) [ 921.151733] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 921.151733] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] return f(*args, **kwargs) [ 921.151733] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 921.151733] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] self._update(context.elevated(), self.compute_nodes[nodename]) [ 921.151733] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 921.151733] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] self._update_to_placement(context, compute_node, startup) [ 921.151733] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 921.151733] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 921.152140] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 921.152140] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] return attempt.get(self._wrap_exception) [ 921.152140] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 921.152140] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] six.reraise(self.value[0], self.value[1], self.value[2]) [ 921.152140] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 921.152140] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] raise value [ 921.152140] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 921.152140] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 921.152140] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 921.152140] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] self.reportclient.update_from_provider_tree( [ 921.152140] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 921.152140] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] with catch_all(pd.uuid): [ 921.152140] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 921.152506] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] self.gen.throw(typ, value, traceback) [ 921.152506] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 921.152506] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] raise exception.ResourceProviderSyncFailed() [ 921.152506] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 921.152506] env[69796]: ERROR nova.compute.manager [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] [ 921.152506] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.601s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.153726] env[69796]: INFO nova.compute.claims [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 921.189078] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.189721] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.554838] env[69796]: DEBUG nova.compute.manager [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 921.657351] env[69796]: INFO nova.compute.manager [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 428c79e1-3a68-444b-8a4c-5ed2184539b6] Took 1.03 seconds to deallocate network for instance. 
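Editor's note on the recurring 400s above: every failed inventory update pushes a DISK_GB record with max_unit of 0, while placement's inventory schema requires max_unit to be at least 1 (the inventory read back from placement during _refresh_and_get_inventory shows max_unit 1, but each local update from the provider tree sends 0 again), so set_inventory_for_provider gets a 400 and every instance claim ends in ResourceProviderSyncFailed. Below is a minimal sketch, outside Nova, of why the payload is rejected: it assumes the third-party jsonschema package and uses only the schema fragment quoted verbatim in the error detail, with the inventory values copied from the log; it is an illustration, not placement's actual schema module.

    # Sketch: reproduce the "0 is less than the minimum of 1" failure locally.
    # Assumes: pip install jsonschema. Schema below is a cut-down stand-in for
    # the fragment cited in the 400 response, not the real placement schema.
    import jsonschema

    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            # Constraint quoted in the error detail above.
                            "max_unit": {
                                "type": "integer",
                                "minimum": 1,
                                "maximum": 2147483647,
                            },
                        },
                    },
                },
            },
        },
    }

    # DISK_GB inventory exactly as the resource tracker reports it in the log:
    # total 400 GB but max_unit 0, which violates the minimum of 1.
    payload = {
        "inventories": {
            "DISK_GB": {
                "total": 400,
                "reserved": 0,
                "min_unit": 1,
                "max_unit": 0,
                "step_size": 1,
                "allocation_ratio": 1.0,
            },
        },
    }

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.exceptions.ValidationError as exc:
        # Prints: 0 is less than the minimum of 1
        print(exc.message)

Running the sketch prints the same validation message placement embeds in each 400 response above, which is why the builds are re-scheduled rather than ever reaching the virt driver.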
[ 921.663890] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.146s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.696104] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.696447] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.697434] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.697434] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.697434] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.697434] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 921.697434] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69796) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 921.697949] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager.update_available_resource {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 922.083345] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.192704] env[69796]: DEBUG nova.scheduler.client.report [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 922.201059] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.211612] env[69796]: DEBUG nova.scheduler.client.report [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 922.211962] env[69796]: DEBUG nova.compute.provider_tree [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 922.223392] env[69796]: DEBUG nova.scheduler.client.report [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 922.246061] env[69796]: DEBUG nova.scheduler.client.report [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 
tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 922.534879] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f74dbb-09a0-4c5f-a5f9-c12e395d2036 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.546734] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68a494d-b385-4075-9368-8f411d72c8be {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.587837] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345976ff-d21b-4ad6-91d3-6bec1104645c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.599533] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b09489-530e-406f-930c-d40a480434d4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.614468] env[69796]: DEBUG nova.compute.provider_tree [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 922.701018] env[69796]: INFO nova.scheduler.client.report [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Deleted allocations for instance 428c79e1-3a68-444b-8a4c-5ed2184539b6 [ 923.136142] env[69796]: ERROR nova.scheduler.client.report [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [req-90bbbe6e-6ae9-47f8-8a8b-0187bf7d8c1f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-90bbbe6e-6ae9-47f8-8a8b-0187bf7d8c1f"}]} [ 923.137324] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.984s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.137324] env[69796]: ERROR nova.compute.manager [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 923.137324] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Traceback (most recent call last): [ 923.137324] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 923.137324] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] yield [ 923.137324] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 923.137324] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] self.set_inventory_for_provider( [ 923.137324] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 923.137324] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 923.138240] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-90bbbe6e-6ae9-47f8-8a8b-0187bf7d8c1f"}]} [ 923.138240] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] [ 923.138240] env[69796]: ERROR nova.compute.manager [instance: 
c90ad510-78a4-4ee5-bcc5-cc564d26735a] During handling of the above exception, another exception occurred: [ 923.138240] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] [ 923.138240] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Traceback (most recent call last): [ 923.138240] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 923.138240] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] with self.rt.instance_claim(context, instance, node, allocs, [ 923.138240] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 923.138240] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] return f(*args, **kwargs) [ 923.138550] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 923.138550] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] self._update(elevated, cn) [ 923.138550] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 923.138550] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] self._update_to_placement(context, compute_node, startup) [ 923.138550] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 923.138550] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 923.138550] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 923.138550] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] return attempt.get(self._wrap_exception) [ 923.138550] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 923.138550] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] six.reraise(self.value[0], self.value[1], self.value[2]) [ 923.138550] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 923.138550] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] raise value [ 923.138550] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 923.139170] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 923.139170] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 923.139170] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] self.reportclient.update_from_provider_tree( [ 923.139170] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 923.139170] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] with catch_all(pd.uuid): [ 923.139170] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 923.139170] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] self.gen.throw(typ, value, traceback) [ 923.139170] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 923.139170] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] raise exception.ResourceProviderSyncFailed() [ 923.139170] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 923.139170] env[69796]: ERROR nova.compute.manager [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] [ 923.139666] env[69796]: DEBUG nova.compute.utils [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 923.139666] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.495s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.140454] env[69796]: INFO nova.compute.claims [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 923.143252] env[69796]: DEBUG nova.compute.manager [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Build of instance c90ad510-78a4-4ee5-bcc5-cc564d26735a was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
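The 400 quoted above is placement's JSON-Schema validation of the PUT /resource_providers/{uuid}/inventories body: max_unit must be an integer between 1 and 2147483647, and the compute host reported DISK_GB max_unit = 0. Below is a minimal, illustrative reproduction of that check with the jsonschema library; the schema fragment is approximated from the error detail in the log, not taken from placement's source.

    # Illustrative only: approximates the schema fragment quoted in the 400 above.
    import jsonschema

    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {"type": "integer",
                                         "minimum": 1,
                                         "maximum": 2147483647},
                        },
                    },
                },
            },
        },
    }

    payload = {"inventories": {
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                    "max_unit": 0, "step_size": 1, "allocation_ratio": 1.0}}}

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.ValidationError as exc:
        print(exc.message)  # "0 is less than the minimum of 1"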
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 923.143674] env[69796]: DEBUG nova.compute.manager [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 923.143976] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "refresh_cache-c90ad510-78a4-4ee5-bcc5-cc564d26735a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.144056] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "refresh_cache-c90ad510-78a4-4ee5-bcc5-cc564d26735a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.144236] env[69796]: DEBUG nova.network.neutron [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 923.183021] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.209789] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b2d6d7bb-1511-42be-b3fb-8c6146560006 tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Lock "428c79e1-3a68-444b-8a4c-5ed2184539b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.922s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.669830] env[69796]: DEBUG nova.network.neutron [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.711103] env[69796]: DEBUG nova.compute.manager [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 923.781180] env[69796]: DEBUG nova.network.neutron [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.173854] env[69796]: DEBUG nova.scheduler.client.report [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 924.196928] env[69796]: DEBUG nova.scheduler.client.report [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 924.197175] env[69796]: DEBUG nova.compute.provider_tree [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 924.215707] env[69796]: DEBUG nova.scheduler.client.report [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 924.237982] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.238991] env[69796]: DEBUG nova.scheduler.client.report [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: 
HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 924.284316] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "refresh_cache-c90ad510-78a4-4ee5-bcc5-cc564d26735a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.284948] env[69796]: DEBUG nova.compute.manager [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 924.284948] env[69796]: DEBUG nova.compute.manager [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 924.284948] env[69796]: DEBUG nova.network.neutron [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 924.311028] env[69796]: DEBUG nova.network.neutron [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Instance cache missing network info. 
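The Acquiring / acquired / released lines around "compute_resources" throughout this log come from the resource tracker serializing claims behind a single oslo.concurrency semaphore; the waited/held figures (e.g. "waited 16.495s" above) measure how long a request queued behind other claims and how long it then kept the lock. A minimal sketch of that locking pattern, with the function name invented for illustration:

    # Sketch of the oslo.concurrency pattern behind the "compute_resources"
    # lock lines; do_claim is an illustrative name, not Nova code.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def do_claim(instance_uuid):
        # Runs with the semaphore held; concurrent claims queue up, which is
        # what the "waited N s" / "held N s" log lines measure.
        print('claiming resources for %s' % instance_uuid)

    do_claim('c90ad510-78a4-4ee5-bcc5-cc564d26735a')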
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 924.548902] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17f0b24-f065-4c7d-bdf0-54d5147940c5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.558096] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72777fb4-2e78-4044-8d82-64006d1aa595 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.595173] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3646221-458e-473e-a8e2-6bc8a2ef22e5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.603860] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20891c2-8e81-43c8-aa64-1499d2c4aaed {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.624388] env[69796]: DEBUG nova.compute.provider_tree [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 924.814761] env[69796]: DEBUG nova.network.neutron [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.152942] env[69796]: ERROR nova.scheduler.client.report [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [req-b0623fac-a997-4862-a74a-c5bdb6d30f25] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-b0623fac-a997-4862-a74a-c5bdb6d30f25"}]} [ 925.155944] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.015s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.155944] env[69796]: ERROR nova.compute.manager [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 925.155944] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] Traceback (most recent call last): [ 925.155944] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 925.155944] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] yield [ 925.155944] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 925.155944] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] self.set_inventory_for_provider( [ 925.155944] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 925.155944] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 925.156309] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-b0623fac-a997-4862-a74a-c5bdb6d30f25"}]} [ 925.156309] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] [ 925.156309] env[69796]: ERROR nova.compute.manager [instance: 
a2632638-403e-43e1-add1-949ef2b3d125] During handling of the above exception, another exception occurred: [ 925.156309] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] [ 925.156309] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] Traceback (most recent call last): [ 925.156309] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 925.156309] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] with self.rt.instance_claim(context, instance, node, allocs, [ 925.156309] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 925.156309] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] return f(*args, **kwargs) [ 925.156599] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 925.156599] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] self._update(elevated, cn) [ 925.156599] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 925.156599] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] self._update_to_placement(context, compute_node, startup) [ 925.156599] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 925.156599] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 925.156599] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 925.156599] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] return attempt.get(self._wrap_exception) [ 925.156599] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 925.156599] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] six.reraise(self.value[0], self.value[1], self.value[2]) [ 925.156599] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 925.156599] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] raise value [ 925.156599] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 925.157151] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 925.157151] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 925.157151] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] self.reportclient.update_from_provider_tree( [ 925.157151] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 925.157151] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] with catch_all(pd.uuid): [ 925.157151] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 925.157151] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] self.gen.throw(typ, value, traceback) [ 925.157151] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 925.157151] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] raise exception.ResourceProviderSyncFailed() [ 925.157151] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 925.157151] env[69796]: ERROR nova.compute.manager [instance: a2632638-403e-43e1-add1-949ef2b3d125] [ 925.157475] env[69796]: DEBUG nova.compute.utils [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 925.162022] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.378s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.162022] env[69796]: INFO nova.compute.claims [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 925.164214] env[69796]: DEBUG nova.compute.manager [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] Build of instance a2632638-403e-43e1-add1-949ef2b3d125 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
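The inventory refreshed from placement (DISK_GB max_unit 1) and the inventory the resource tracker tries to push (max_unit 0) differ only in that one field, a value reported by the compute driver; every claim fails until the reported value is back inside placement's allowed range. A defensive clamp such as the sketch below (illustrative code, not Nova's) would keep the value valid, at the cost of masking whatever made the usable disk size collapse to zero:

    # Illustrative guard: clamp max_unit into the range placement accepts
    # (1..2**31-1) before the inventory is handed to the report client.
    MAX_INT32 = 2147483647

    def clamp_max_unit(inventories):
        fixed = {}
        for rc, inv in inventories.items():
            inv = dict(inv)
            inv['max_unit'] = min(max(inv.get('max_unit', 1), 1), MAX_INT32)
            fixed[rc] = inv
        return fixed

    disk = {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0,
            'step_size': 1, 'allocation_ratio': 1.0}
    print(clamp_max_unit({'DISK_GB': disk})['DISK_GB']['max_unit'])  # -> 1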
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 925.164799] env[69796]: DEBUG nova.compute.manager [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 925.165330] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Acquiring lock "refresh_cache-a2632638-403e-43e1-add1-949ef2b3d125" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.165624] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Acquired lock "refresh_cache-a2632638-403e-43e1-add1-949ef2b3d125" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.165934] env[69796]: DEBUG nova.network.neutron [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 925.320517] env[69796]: INFO nova.compute.manager [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: c90ad510-78a4-4ee5-bcc5-cc564d26735a] Took 1.03 seconds to deallocate network for instance. [ 925.694105] env[69796]: DEBUG nova.network.neutron [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] Instance cache missing network info. 
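What placement itself currently holds for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 can be read from its inventories endpoint, which also returns the provider generation needed for any corrective PUT. The sketch below uses plain requests; the endpoint URL, token, and microversion header value are assumptions to be filled in from the deployment.

    # Read the provider's current inventory straight from the placement API.
    # PLACEMENT_URL and OS_TOKEN are assumptions; supply them for your cloud.
    import os
    import requests

    PLACEMENT_URL = os.environ.get('PLACEMENT_URL', 'http://controller/placement')
    TOKEN = os.environ['OS_TOKEN']
    RP_UUID = 'dc1d576d-f9a3-4db7-b636-fdf2129d2ab3'

    resp = requests.get(
        '%s/resource_providers/%s/inventories' % (PLACEMENT_URL, RP_UUID),
        headers={'X-Auth-Token': TOKEN,
                 'OpenStack-API-Version': 'placement 1.26'})
    resp.raise_for_status()
    body = resp.json()
    print(body['resource_provider_generation'])
    print(body['inventories'].get('DISK_GB'))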
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 925.780146] env[69796]: DEBUG nova.network.neutron [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.199169] env[69796]: DEBUG nova.scheduler.client.report [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 926.213309] env[69796]: DEBUG nova.scheduler.client.report [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 926.214027] env[69796]: DEBUG nova.compute.provider_tree [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 926.225247] env[69796]: DEBUG nova.scheduler.client.report [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 926.247500] env[69796]: DEBUG nova.scheduler.client.report [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 926.280659] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Releasing lock 
"refresh_cache-a2632638-403e-43e1-add1-949ef2b3d125" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.280851] env[69796]: DEBUG nova.compute.manager [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 926.281056] env[69796]: DEBUG nova.compute.manager [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 926.281230] env[69796]: DEBUG nova.network.neutron [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 926.298998] env[69796]: DEBUG nova.network.neutron [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 926.349583] env[69796]: INFO nova.scheduler.client.report [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Deleted allocations for instance c90ad510-78a4-4ee5-bcc5-cc564d26735a [ 926.606700] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b4faaf-baf2-4122-99c6-fb5d49fe373b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.615708] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6711afc-57be-4de0-820d-a8803211daa9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.653137] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4b2a9a-4762-4221-99fc-fc11e86c1a6b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.663600] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Acquiring lock "3af4d581-8476-4b47-a2e5-eb1c99deb2bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.663834] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Lock "3af4d581-8476-4b47-a2e5-eb1c99deb2bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.665172] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a593a92-e693-47a3-aae1-4a077f7b4f51 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.679926] env[69796]: DEBUG nova.compute.provider_tree [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 926.802239] env[69796]: DEBUG nova.network.neutron [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.861206] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8a7ec0ed-28d6-4234-86e1-4b33fe36c251 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "c90ad510-78a4-4ee5-bcc5-cc564d26735a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.739s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.208598] env[69796]: ERROR nova.scheduler.client.report [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [req-4390fb0d-a29e-4a20-ac2f-c6bba542d6c9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4390fb0d-a29e-4a20-ac2f-c6bba542d6c9"}]} [ 927.211017] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.051s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.211017] env[69796]: ERROR nova.compute.manager [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 927.211017] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Traceback (most recent call last): [ 927.211017] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 927.211017] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] yield [ 927.211017] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 927.211017] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] self.set_inventory_for_provider( [ 927.211017] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 927.211017] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 927.211371] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4390fb0d-a29e-4a20-ac2f-c6bba542d6c9"}]} [ 927.211371] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] [ 927.211371] env[69796]: ERROR nova.compute.manager 
[instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] During handling of the above exception, another exception occurred: [ 927.211371] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] [ 927.211371] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Traceback (most recent call last): [ 927.211371] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 927.211371] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] with self.rt.instance_claim(context, instance, node, allocs, [ 927.211371] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 927.211371] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] return f(*args, **kwargs) [ 927.211686] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 927.211686] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] self._update(elevated, cn) [ 927.211686] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 927.211686] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] self._update_to_placement(context, compute_node, startup) [ 927.211686] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 927.211686] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 927.211686] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 927.211686] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] return attempt.get(self._wrap_exception) [ 927.211686] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 927.211686] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] six.reraise(self.value[0], self.value[1], self.value[2]) [ 927.211686] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 927.211686] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] raise value [ 927.211686] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 927.212097] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 927.212097] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 927.212097] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] self.reportclient.update_from_provider_tree( [ 927.212097] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 927.212097] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] with catch_all(pd.uuid): [ 927.212097] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 927.212097] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] self.gen.throw(typ, value, traceback) [ 927.212097] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 927.212097] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] raise exception.ResourceProviderSyncFailed() [ 927.212097] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 927.212097] env[69796]: ERROR nova.compute.manager [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] [ 927.212473] env[69796]: DEBUG nova.compute.utils [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 927.213962] env[69796]: DEBUG oslo_concurrency.lockutils [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.251s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.215540] env[69796]: INFO nova.compute.claims [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 927.218667] env[69796]: DEBUG nova.compute.manager [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Build of instance 88536d9e-42b0-4115-b55c-7cf1bc03314b was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
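The tracebacks show _update_to_placement wrapped by the retrying library, yet the failure surfaces on the first attempt: the wrapper is configured to retry only a narrow class of errors (provider update conflicts), and a schema 400 is deterministic, so retrying would not help anyway. A minimal sketch of that wrapper shape, with the exception class and predicate invented for illustration:

    # Sketch of a retry wrapper like the one in the traceback: retry only on a
    # conflict-style error and let everything else (such as the 400) propagate.
    from retrying import retry

    class ProviderUpdateConflict(Exception):
        pass

    @retry(stop_max_attempt_number=4,
           retry_on_exception=lambda exc: isinstance(exc, ProviderUpdateConflict))
    def update_to_placement():
        # A schema-validation 400 is not a conflict, so it is raised on the
        # first call instead of being retried.
        raise ValueError('max_unit: 0 is less than the minimum of 1')

    try:
        update_to_placement()
    except ValueError as exc:
        print(exc)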
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 927.219139] env[69796]: DEBUG nova.compute.manager [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 927.219400] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Acquiring lock "refresh_cache-88536d9e-42b0-4115-b55c-7cf1bc03314b" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.219569] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Acquired lock "refresh_cache-88536d9e-42b0-4115-b55c-7cf1bc03314b" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.219738] env[69796]: DEBUG nova.network.neutron [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 927.224787] env[69796]: DEBUG oslo_concurrency.lockutils [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Acquiring lock "b2c473de-7aee-428d-87d2-d747f12402f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.225012] env[69796]: DEBUG oslo_concurrency.lockutils [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Lock "b2c473de-7aee-428d-87d2-d747f12402f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.305687] env[69796]: INFO nova.compute.manager [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] [instance: a2632638-403e-43e1-add1-949ef2b3d125] Took 1.02 seconds to deallocate network for instance. [ 927.363549] env[69796]: DEBUG nova.compute.manager [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 927.756860] env[69796]: DEBUG nova.network.neutron [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 927.885142] env[69796]: DEBUG nova.network.neutron [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.891251] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.255995] env[69796]: DEBUG nova.scheduler.client.report [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 928.275270] env[69796]: DEBUG nova.scheduler.client.report [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 928.275492] env[69796]: DEBUG nova.compute.provider_tree [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 928.289222] env[69796]: DEBUG nova.scheduler.client.report [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 928.310243] env[69796]: DEBUG nova.scheduler.client.report [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: 
HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 928.340718] env[69796]: INFO nova.scheduler.client.report [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Deleted allocations for instance a2632638-403e-43e1-add1-949ef2b3d125 [ 928.390819] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Releasing lock "refresh_cache-88536d9e-42b0-4115-b55c-7cf1bc03314b" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.391073] env[69796]: DEBUG nova.compute.manager [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 928.391264] env[69796]: DEBUG nova.compute.manager [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 928.391432] env[69796]: DEBUG nova.network.neutron [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 928.424530] env[69796]: DEBUG nova.network.neutron [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 928.525702] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.525950] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.682133] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d4d6b9-7c78-4559-b72d-bb68f08c52a4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.690857] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb4c098-9757-4b3c-90e4-347287bae48a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.723621] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39340a2-84df-475b-b9ee-9b69f79339ca {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.731858] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3bc0a6f-dd60-45b0-8551-b325039d7362 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.750143] env[69796]: DEBUG nova.compute.provider_tree [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 928.853371] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5815c0c9-a52c-41bd-8150-45827dfe8e4d tempest-ServerTagsTestJSON-540437378 tempest-ServerTagsTestJSON-540437378-project-member] Lock "a2632638-403e-43e1-add1-949ef2b3d125" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.021s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.928825] env[69796]: DEBUG nova.network.neutron [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 
88536d9e-42b0-4115-b55c-7cf1bc03314b] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.285116] env[69796]: ERROR nova.scheduler.client.report [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [req-0dd34644-9c94-4bef-a182-3bf1ac3cf247] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-0dd34644-9c94-4bef-a182-3bf1ac3cf247"}]} [ 929.285116] env[69796]: DEBUG oslo_concurrency.lockutils [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.070s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.285458] env[69796]: ERROR nova.compute.manager [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
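The 400 body quoted above is placement's inventory schema rejecting the update: max_unit must be an integer between 1 and 2147483647, so a DISK_GB entry reported with max_unit 0 fails validation and the whole inventory PUT is refused, which is what surfaces in Nova as ResourceProviderSyncFailed. Below is a minimal sketch (not part of the log; it assumes the python-jsonschema package and reduces the schema to the max_unit constraint quoted in the response body) that reproduces the same validation failure:

    # Illustrative sketch, not from the log: reproduce the schema check that
    # rejects DISK_GB max_unit=0. Only the quoted max_unit constraint is modeled.
    import jsonschema

    INVENTORY_SCHEMA = {
        "type": "object",
        "properties": {
            "inventories": {
                "type": "object",
                "patternProperties": {
                    "^[A-Z0-9_]+$": {
                        "type": "object",
                        "properties": {
                            "max_unit": {"type": "integer",
                                         "minimum": 1,
                                         "maximum": 2147483647},
                        },
                    },
                },
            },
        },
    }

    payload = {"inventories": {"DISK_GB": {"total": 400, "reserved": 0,
                                           "min_unit": 1, "max_unit": 0,
                                           "step_size": 1,
                                           "allocation_ratio": 1.0}}}

    try:
        jsonschema.validate(payload, INVENTORY_SCHEMA)
    except jsonschema.exceptions.ValidationError as exc:
        # Prints "0 is less than the minimum of 1" at inventories -> DISK_GB -> max_unit,
        # matching the detail string in the 400 response above.
        print(exc.message, list(exc.absolute_path))
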
[ 929.285458] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Traceback (most recent call last): [ 929.285458] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 929.285458] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] yield [ 929.285458] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 929.285458] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] self.set_inventory_for_provider( [ 929.285458] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 929.285458] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 929.285788] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-0dd34644-9c94-4bef-a182-3bf1ac3cf247"}]} [ 929.285788] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] [ 929.285788] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] During handling of the above exception, another exception occurred: [ 929.285788] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] [ 929.285788] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Traceback (most recent call last): [ 929.285788] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 929.285788] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] with self.rt.instance_claim(context, instance, node, allocs, [ 929.285788] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 929.285788] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] return f(*args, **kwargs) [ 929.286168] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 929.286168] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] self._update(elevated, cn) [ 929.286168] env[69796]: ERROR nova.compute.manager [instance: 
f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 929.286168] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] self._update_to_placement(context, compute_node, startup) [ 929.286168] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 929.286168] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 929.286168] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 929.286168] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] return attempt.get(self._wrap_exception) [ 929.286168] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 929.286168] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] six.reraise(self.value[0], self.value[1], self.value[2]) [ 929.286168] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 929.286168] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] raise value [ 929.286168] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 929.286609] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 929.286609] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 929.286609] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] self.reportclient.update_from_provider_tree( [ 929.286609] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 929.286609] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] with catch_all(pd.uuid): [ 929.286609] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 929.286609] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] self.gen.throw(typ, value, traceback) [ 929.286609] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 929.286609] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] raise exception.ResourceProviderSyncFailed() [ 929.286609] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
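The retrying.py and six.py frames that recur in these tracebacks come from the retry decorator wrapped around ResourceTracker._update_to_placement: failures from the decorated call pass back through the wrapper, and the final exception is re-raised via six.reraise, which is why those frames sit between resource_tracker.py and report.py in every dump. A minimal sketch follows (not part of the log; the decorator arguments and names are illustrative, not Nova's actual settings) showing how such a wrapper produces the same frame pattern:

    # Illustrative sketch, not from the log: a retrying-decorated call whose final
    # failure is re-raised through wrapped_f / Retrying.call / six.reraise frames.
    from retrying import retry

    class ResourceProviderSyncFailed(Exception):
        pass

    @retry(stop_max_attempt_number=2, wait_fixed=100,
           retry_on_exception=lambda exc: isinstance(exc, ResourceProviderSyncFailed))
    def update_to_placement():
        # Stands in for ResourceTracker._update_to_placement(); always fails here.
        raise ResourceProviderSyncFailed("placement rejected the inventory update")

    # Raises after the retries are exhausted; the resulting traceback shows the
    # same wrapped_f -> Retrying.call -> attempt.get -> six.reraise frames as above.
    update_to_placement()
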
[ 929.286609] env[69796]: ERROR nova.compute.manager [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] [ 929.286964] env[69796]: DEBUG nova.compute.utils [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 929.289091] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.278s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.290853] env[69796]: INFO nova.compute.claims [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 929.293754] env[69796]: DEBUG nova.compute.manager [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Build of instance f7d7c4dd-6d36-4334-8039-0e348420a65d was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 929.295654] env[69796]: DEBUG nova.compute.manager [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 929.295654] env[69796]: DEBUG oslo_concurrency.lockutils [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "refresh_cache-f7d7c4dd-6d36-4334-8039-0e348420a65d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.295654] env[69796]: DEBUG oslo_concurrency.lockutils [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquired lock "refresh_cache-f7d7c4dd-6d36-4334-8039-0e348420a65d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.295654] env[69796]: DEBUG nova.network.neutron [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 929.358874] env[69796]: DEBUG nova.compute.manager [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] 
[instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 929.434517] env[69796]: INFO nova.compute.manager [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] [instance: 88536d9e-42b0-4115-b55c-7cf1bc03314b] Took 1.04 seconds to deallocate network for instance. [ 929.822040] env[69796]: DEBUG nova.network.neutron [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 929.882222] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.906086] env[69796]: DEBUG nova.network.neutron [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.321811] env[69796]: DEBUG nova.scheduler.client.report [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 930.339056] env[69796]: DEBUG nova.scheduler.client.report [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 930.339056] env[69796]: DEBUG nova.compute.provider_tree [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 930.363019] env[69796]: DEBUG nova.scheduler.client.report [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 930.388372] env[69796]: DEBUG nova.scheduler.client.report [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 930.414697] env[69796]: DEBUG oslo_concurrency.lockutils [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Releasing lock "refresh_cache-f7d7c4dd-6d36-4334-8039-0e348420a65d" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.414919] env[69796]: DEBUG nova.compute.manager [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 930.415121] env[69796]: DEBUG nova.compute.manager [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 930.415296] env[69796]: DEBUG nova.network.neutron [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 930.435708] env[69796]: DEBUG nova.network.neutron [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.466469] env[69796]: INFO nova.scheduler.client.report [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Deleted allocations for instance 88536d9e-42b0-4115-b55c-7cf1bc03314b [ 930.769438] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c43d494-c65d-4657-b495-46d2f6faefe7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.780290] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3c481a-0dd9-4ef3-bdf2-5cc1964335b7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.813752] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c454c323-330c-4cdd-8e08-257c1a88ba7e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.823444] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79f977c-3c29-40bc-bf81-dfe40a5697ee {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.838632] env[69796]: DEBUG nova.compute.provider_tree [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 930.939752] env[69796]: DEBUG nova.network.neutron [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.978476] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f04d847d-b1eb-43c1-88d9-fdc0ebe79666 tempest-AttachInterfacesTestJSON-527541319 tempest-AttachInterfacesTestJSON-527541319-project-member] Lock "88536d9e-42b0-4115-b55c-7cf1bc03314b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.119s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.366313] env[69796]: ERROR nova.scheduler.client.report [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [req-93d84d9e-21c8-4bfc-bb6c-bb6c591222a1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-93d84d9e-21c8-4bfc-bb6c-bb6c591222a1"}]} [ 931.366947] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.078s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.367597] env[69796]: ERROR nova.compute.manager [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 931.367597] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Traceback (most recent call last): [ 931.367597] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 931.367597] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] yield [ 931.367597] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 931.367597] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] self.set_inventory_for_provider( [ 931.367597] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 931.367597] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 931.367874] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 
", "code": "placement.undefined_code", "request_id": "req-93d84d9e-21c8-4bfc-bb6c-bb6c591222a1"}]} [ 931.367874] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] [ 931.367874] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] During handling of the above exception, another exception occurred: [ 931.367874] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] [ 931.367874] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Traceback (most recent call last): [ 931.367874] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 931.367874] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] with self.rt.instance_claim(context, instance, node, allocs, [ 931.367874] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 931.367874] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] return f(*args, **kwargs) [ 931.368172] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 931.368172] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] self._update(elevated, cn) [ 931.368172] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 931.368172] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] self._update_to_placement(context, compute_node, startup) [ 931.368172] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 931.368172] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 931.368172] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 931.368172] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] return attempt.get(self._wrap_exception) [ 931.368172] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 931.368172] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] six.reraise(self.value[0], self.value[1], self.value[2]) [ 931.368172] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 931.368172] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] raise value [ 931.368172] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 931.368602] env[69796]: ERROR nova.compute.manager [instance: 
6a05bec6-521d-456c-9804-92aa05f38c0c] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 931.368602] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 931.368602] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] self.reportclient.update_from_provider_tree( [ 931.368602] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 931.368602] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] with catch_all(pd.uuid): [ 931.368602] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 931.368602] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] self.gen.throw(typ, value, traceback) [ 931.368602] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 931.368602] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] raise exception.ResourceProviderSyncFailed() [ 931.368602] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 931.368602] env[69796]: ERROR nova.compute.manager [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] [ 931.369104] env[69796]: DEBUG nova.compute.utils [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 931.370267] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.285s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.372061] env[69796]: INFO nova.compute.claims [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 931.375055] env[69796]: DEBUG nova.compute.manager [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Build of instance 6a05bec6-521d-456c-9804-92aa05f38c0c was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 931.375977] env[69796]: DEBUG nova.compute.manager [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 931.376341] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Acquiring lock "refresh_cache-6a05bec6-521d-456c-9804-92aa05f38c0c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.376521] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Acquired lock "refresh_cache-6a05bec6-521d-456c-9804-92aa05f38c0c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.376683] env[69796]: DEBUG nova.network.neutron [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 931.398171] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Acquiring lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.398403] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.443144] env[69796]: INFO nova.compute.manager [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: f7d7c4dd-6d36-4334-8039-0e348420a65d] Took 1.03 seconds to deallocate network for instance. [ 931.485981] env[69796]: DEBUG nova.compute.manager [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 931.900364] env[69796]: DEBUG nova.network.neutron [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 932.011316] env[69796]: DEBUG oslo_concurrency.lockutils [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.021455] env[69796]: DEBUG nova.network.neutron [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.401740] env[69796]: DEBUG nova.scheduler.client.report [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 932.415955] env[69796]: DEBUG nova.scheduler.client.report [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 932.416258] env[69796]: DEBUG nova.compute.provider_tree [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 932.427364] env[69796]: DEBUG nova.scheduler.client.report [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 932.446254] env[69796]: DEBUG nova.scheduler.client.report [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Refreshing trait associations for resource provider 
dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 932.476018] env[69796]: INFO nova.scheduler.client.report [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Deleted allocations for instance f7d7c4dd-6d36-4334-8039-0e348420a65d [ 932.524548] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Releasing lock "refresh_cache-6a05bec6-521d-456c-9804-92aa05f38c0c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.524792] env[69796]: DEBUG nova.compute.manager [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 932.524983] env[69796]: DEBUG nova.compute.manager [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 932.525179] env[69796]: DEBUG nova.network.neutron [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 932.545998] env[69796]: DEBUG nova.network.neutron [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 932.790077] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ab41ad-081a-45f0-b22e-8c4c85f699ba {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.800034] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f54402f-ed4a-471e-a635-a579fcaffcb0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.832272] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db212a41-a325-41a1-9591-6cef313f0a3d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.841911] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3f4f13-c448-4271-abb2-945e8c751436 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.858157] env[69796]: DEBUG nova.compute.provider_tree [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 932.985069] env[69796]: DEBUG oslo_concurrency.lockutils [None req-bbccc395-8989-4d81-bd24-5be8b561e38a tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "f7d7c4dd-6d36-4334-8039-0e348420a65d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.914s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.051345] env[69796]: DEBUG nova.network.neutron [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.382756] env[69796]: ERROR nova.scheduler.client.report [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [req-a780826f-4118-4f8d-90e0-a138c3e08d4d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a780826f-4118-4f8d-90e0-a138c3e08d4d"}]} [ 933.383159] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.013s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.383746] env[69796]: ERROR nova.compute.manager [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 933.383746] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Traceback (most recent call last): [ 933.383746] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 933.383746] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] yield [ 933.383746] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 933.383746] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] self.set_inventory_for_provider( [ 933.383746] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 933.383746] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 933.384072] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a780826f-4118-4f8d-90e0-a138c3e08d4d"}]} [ 933.384072] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] [ 933.384072] env[69796]: ERROR 
nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] During handling of the above exception, another exception occurred: [ 933.384072] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] [ 933.384072] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Traceback (most recent call last): [ 933.384072] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 933.384072] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] with self.rt.instance_claim(context, instance, node, allocs, [ 933.384072] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 933.384072] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] return f(*args, **kwargs) [ 933.384387] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 933.384387] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] self._update(elevated, cn) [ 933.384387] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 933.384387] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] self._update_to_placement(context, compute_node, startup) [ 933.384387] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 933.384387] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 933.384387] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 933.384387] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] return attempt.get(self._wrap_exception) [ 933.384387] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 933.384387] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] six.reraise(self.value[0], self.value[1], self.value[2]) [ 933.384387] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 933.384387] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] raise value [ 933.384387] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 933.384809] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 933.384809] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 933.384809] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] self.reportclient.update_from_provider_tree( [ 933.384809] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 933.384809] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] with catch_all(pd.uuid): [ 933.384809] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 933.384809] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] self.gen.throw(typ, value, traceback) [ 933.384809] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 933.384809] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] raise exception.ResourceProviderSyncFailed() [ 933.384809] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 933.384809] env[69796]: ERROR nova.compute.manager [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] [ 933.385178] env[69796]: DEBUG nova.compute.utils [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 933.385635] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.309s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.387169] env[69796]: INFO nova.compute.claims [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 933.390051] env[69796]: DEBUG nova.compute.manager [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Build of instance 029d73b8-bca8-4225-b332-fea194dd3d1e was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 933.390292] env[69796]: DEBUG nova.compute.manager [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 933.390467] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Acquiring lock "refresh_cache-029d73b8-bca8-4225-b332-fea194dd3d1e" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.390617] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Acquired lock "refresh_cache-029d73b8-bca8-4225-b332-fea194dd3d1e" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.390848] env[69796]: DEBUG nova.network.neutron [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.490048] env[69796]: DEBUG nova.compute.manager [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 933.553838] env[69796]: INFO nova.compute.manager [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 6a05bec6-521d-456c-9804-92aa05f38c0c] Took 1.03 seconds to deallocate network for instance. [ 933.918241] env[69796]: DEBUG nova.network.neutron [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.017889] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.088031] env[69796]: DEBUG nova.network.neutron [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.417728] env[69796]: DEBUG nova.scheduler.client.report [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 934.435640] env[69796]: DEBUG nova.scheduler.client.report [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 934.435875] env[69796]: DEBUG nova.compute.provider_tree [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 934.454634] env[69796]: DEBUG nova.scheduler.client.report [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 934.476641] env[69796]: DEBUG nova.scheduler.client.report [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: 
HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 934.589378] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Releasing lock "refresh_cache-029d73b8-bca8-4225-b332-fea194dd3d1e" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.589669] env[69796]: DEBUG nova.compute.manager [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 934.589839] env[69796]: DEBUG nova.compute.manager [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 934.589986] env[69796]: DEBUG nova.network.neutron [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 934.594941] env[69796]: INFO nova.scheduler.client.report [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Deleted allocations for instance 6a05bec6-521d-456c-9804-92aa05f38c0c [ 934.631269] env[69796]: DEBUG nova.network.neutron [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 934.868565] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03cf0287-6f41-4149-9955-204c1d8e31ea {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.876743] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca479be-bb11-49ef-ad68-c1e91355d0fe {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.910374] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495c50a3-e2e9-405b-a388-cdd997be6032 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.919632] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cd453a-53f8-45b5-ab42-d4de42441888 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.935245] env[69796]: DEBUG nova.compute.provider_tree [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 935.113864] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9f7618e9-5e2d-429e-87f2-17c224a7948a tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Lock "6a05bec6-521d-456c-9804-92aa05f38c0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.329s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.138643] env[69796]: DEBUG nova.network.neutron [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.429040] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.429268] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.462978] env[69796]: ERROR nova.scheduler.client.report [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] [req-ead40773-efc2-429b-9472-610aa2131b38] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-ead40773-efc2-429b-9472-610aa2131b38"}]} [ 935.463351] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.078s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.463934] env[69796]: ERROR nova.compute.manager [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 935.463934] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] Traceback (most recent call last): [ 935.463934] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 935.463934] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] yield [ 935.463934] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 935.463934] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] self.set_inventory_for_provider( [ 935.463934] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 935.463934] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 935.465112] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-ead40773-efc2-429b-9472-610aa2131b38"}]} [ 935.465112] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] [ 935.465112] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] During handling of the above exception, another exception occurred: [ 935.465112] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] [ 935.465112] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] Traceback (most recent call last): [ 935.465112] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 935.465112] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] with self.rt.instance_claim(context, instance, node, allocs, [ 935.465112] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 935.465112] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] return f(*args, **kwargs) [ 935.465625] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 935.465625] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] self._update(elevated, cn) [ 935.465625] env[69796]: ERROR nova.compute.manager [instance: 
d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 935.465625] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] self._update_to_placement(context, compute_node, startup) [ 935.465625] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 935.465625] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 935.465625] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 935.465625] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] return attempt.get(self._wrap_exception) [ 935.465625] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 935.465625] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 935.465625] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 935.465625] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] raise value [ 935.465625] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 935.466233] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 935.466233] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 935.466233] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] self.reportclient.update_from_provider_tree( [ 935.466233] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 935.466233] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] with catch_all(pd.uuid): [ 935.466233] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 935.466233] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] self.gen.throw(typ, value, traceback) [ 935.466233] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 935.466233] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] raise exception.ResourceProviderSyncFailed() [ 935.466233] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 935.466233] env[69796]: ERROR nova.compute.manager [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] [ 935.466760] env[69796]: DEBUG nova.compute.utils [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 935.466760] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.346s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.467796] env[69796]: INFO nova.compute.claims [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 935.473057] env[69796]: DEBUG nova.compute.manager [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] Build of instance d46a5e64-1de4-4f92-b06b-f5367ffea72f was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 935.473057] env[69796]: DEBUG nova.compute.manager [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 935.473057] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Acquiring lock "refresh_cache-d46a5e64-1de4-4f92-b06b-f5367ffea72f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.473057] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Acquired lock "refresh_cache-d46a5e64-1de4-4f92-b06b-f5367ffea72f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.473615] env[69796]: DEBUG nova.network.neutron [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 935.617760] env[69796]: DEBUG nova.compute.manager [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Starting 
instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 935.644452] env[69796]: INFO nova.compute.manager [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] [instance: 029d73b8-bca8-4225-b332-fea194dd3d1e] Took 1.05 seconds to deallocate network for instance. [ 936.000072] env[69796]: DEBUG nova.network.neutron [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 936.063474] env[69796]: DEBUG nova.network.neutron [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.144353] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.495954] env[69796]: DEBUG nova.scheduler.client.report [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 936.509087] env[69796]: DEBUG nova.scheduler.client.report [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 936.509320] env[69796]: DEBUG nova.compute.provider_tree [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 936.520735] env[69796]: DEBUG nova.scheduler.client.report 
[None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 936.537134] env[69796]: DEBUG nova.scheduler.client.report [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 936.567211] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Releasing lock "refresh_cache-d46a5e64-1de4-4f92-b06b-f5367ffea72f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.567469] env[69796]: DEBUG nova.compute.manager [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 936.567655] env[69796]: DEBUG nova.compute.manager [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] [instance: d46a5e64-1de4-4f92-b06b-f5367ffea72f] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 936.668924] env[69796]: INFO nova.scheduler.client.report [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Deleted allocations for instance 029d73b8-bca8-4225-b332-fea194dd3d1e [ 936.829788] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07793042-f74d-488f-be98-57f1f91eaad9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.838041] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30d0fe9-6291-4a1a-b8d1-e1da1be3631b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.871337] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e39c0ffc-e1bd-40dd-aa95-6313c0838ded {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.879471] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6ef43b-cb0d-46e0-9719-3b5a61729ad8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.893588] env[69796]: DEBUG nova.compute.provider_tree [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 936.927265] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquiring lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.927568] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.180123] env[69796]: DEBUG oslo_concurrency.lockutils [None req-df2228c3-5d8f-4ef3-85f8-63821ef1e3b8 tempest-ServerRescueNegativeTestJSON-558396262 tempest-ServerRescueNegativeTestJSON-558396262-project-member] Lock "029d73b8-bca8-4225-b332-fea194dd3d1e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
38.020s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.426655] env[69796]: ERROR nova.scheduler.client.report [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [req-9f066c3e-8a65-416c-9d12-940ca7b989b9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9f066c3e-8a65-416c-9d12-940ca7b989b9"}]} [ 937.426655] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.959s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.426808] env[69796]: ERROR nova.compute.manager [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 937.426808] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Traceback (most recent call last): [ 937.426808] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 937.426808] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] yield [ 937.426808] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 937.426808] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] self.set_inventory_for_provider( [ 937.426808] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 937.426808] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 937.427099] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9f066c3e-8a65-416c-9d12-940ca7b989b9"}]} [ 937.427099] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] [ 937.427099] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] During handling of the above exception, another exception occurred: [ 937.427099] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] [ 937.427099] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Traceback (most recent call last): [ 937.427099] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 937.427099] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] with self.rt.instance_claim(context, instance, node, allocs, [ 937.427099] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 937.427099] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] return f(*args, **kwargs) [ 937.427452] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 937.427452] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] self._update(elevated, cn) [ 937.427452] env[69796]: ERROR nova.compute.manager [instance: 
0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 937.427452] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] self._update_to_placement(context, compute_node, startup) [ 937.427452] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 937.427452] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 937.427452] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 937.427452] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] return attempt.get(self._wrap_exception) [ 937.427452] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 937.427452] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] six.reraise(self.value[0], self.value[1], self.value[2]) [ 937.427452] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 937.427452] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] raise value [ 937.427452] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 937.427842] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 937.427842] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 937.427842] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] self.reportclient.update_from_provider_tree( [ 937.427842] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 937.427842] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] with catch_all(pd.uuid): [ 937.427842] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 937.427842] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] self.gen.throw(typ, value, traceback) [ 937.427842] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 937.427842] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] raise exception.ResourceProviderSyncFailed() [ 937.427842] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 937.427842] env[69796]: ERROR nova.compute.manager [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] [ 937.428175] env[69796]: DEBUG nova.compute.utils [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 937.428520] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.345s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.430122] env[69796]: INFO nova.compute.claims [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 937.433626] env[69796]: DEBUG nova.compute.manager [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Build of instance 0002f8b8-91e0-4868-80c7-a70bcd9fc40c was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 937.434062] env[69796]: DEBUG nova.compute.manager [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 937.434354] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "refresh_cache-0002f8b8-91e0-4868-80c7-a70bcd9fc40c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 937.434521] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquired lock "refresh_cache-0002f8b8-91e0-4868-80c7-a70bcd9fc40c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 937.434685] env[69796]: DEBUG nova.network.neutron [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 937.609129] env[69796]: INFO nova.scheduler.client.report [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Deleted allocations for 
instance d46a5e64-1de4-4f92-b06b-f5367ffea72f [ 937.682856] env[69796]: DEBUG nova.compute.manager [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 937.958027] env[69796]: DEBUG nova.network.neutron [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.054201] env[69796]: DEBUG nova.network.neutron [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.117198] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c0a9605c-2bbb-4ec8-b6b6-24645ed202fb tempest-ServersListShow296Test-80788894 tempest-ServersListShow296Test-80788894-project-member] Lock "d46a5e64-1de4-4f92-b06b-f5367ffea72f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.138s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.211479] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.469755] env[69796]: DEBUG nova.scheduler.client.report [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 938.485246] env[69796]: DEBUG nova.scheduler.client.report [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 938.485496] env[69796]: DEBUG nova.compute.provider_tree [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 938.503100] env[69796]: DEBUG nova.scheduler.client.report [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 938.524736] env[69796]: DEBUG nova.scheduler.client.report [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 938.560257] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Releasing lock "refresh_cache-0002f8b8-91e0-4868-80c7-a70bcd9fc40c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.560586] env[69796]: DEBUG nova.compute.manager [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 938.560868] env[69796]: DEBUG nova.compute.manager [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 938.561096] env[69796]: DEBUG nova.network.neutron [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 938.580018] env[69796]: DEBUG nova.network.neutron [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 938.620669] env[69796]: DEBUG nova.compute.manager [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 938.851194] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed768dc9-efbe-48cc-98d9-21ea12a3f203 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.859246] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7e7e13-79f2-4959-b4cd-826a0bbfb2f5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.892982] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8679f4-6a8b-424f-b93a-63d3b2d349fb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.901413] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b259dc-8c08-48d6-841c-1485ffcd59c2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.915833] env[69796]: DEBUG nova.compute.provider_tree [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 939.082322] env[69796]: DEBUG nova.network.neutron [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.154955] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.440548] env[69796]: ERROR nova.scheduler.client.report [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [req-dd2aa02e-b4c1-4215-8158-5b40cea65e00] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-dd2aa02e-b4c1-4215-8158-5b40cea65e00"}]} [ 939.440548] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.011s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.440816] env[69796]: ERROR nova.compute.manager [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 939.440816] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Traceback (most recent call last): [ 939.440816] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 939.440816] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] yield [ 939.440816] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 939.440816] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] self.set_inventory_for_provider( [ 939.440816] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 939.440816] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 939.441103] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-dd2aa02e-b4c1-4215-8158-5b40cea65e00"}]} [ 939.441103] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] [ 939.441103] env[69796]: ERROR nova.compute.manager [instance: 
5fa540ab-5cc9-4240-8c0d-3c92743d152f] During handling of the above exception, another exception occurred: [ 939.441103] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] [ 939.441103] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Traceback (most recent call last): [ 939.441103] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 939.441103] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] with self.rt.instance_claim(context, instance, node, allocs, [ 939.441103] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 939.441103] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] return f(*args, **kwargs) [ 939.441441] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 939.441441] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] self._update(elevated, cn) [ 939.441441] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 939.441441] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] self._update_to_placement(context, compute_node, startup) [ 939.441441] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 939.441441] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 939.441441] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 939.441441] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] return attempt.get(self._wrap_exception) [ 939.441441] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 939.441441] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 939.441441] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 939.441441] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] raise value [ 939.441441] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 939.441876] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 939.441876] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 939.441876] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] self.reportclient.update_from_provider_tree( [ 939.441876] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 939.441876] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] with catch_all(pd.uuid): [ 939.441876] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 939.441876] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] self.gen.throw(typ, value, traceback) [ 939.441876] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 939.441876] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] raise exception.ResourceProviderSyncFailed() [ 939.441876] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 939.441876] env[69796]: ERROR nova.compute.manager [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] [ 939.442264] env[69796]: DEBUG nova.compute.utils [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 939.443189] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 17.242s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.443189] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.443189] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69796) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 939.444239] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.263s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.444239] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.444239] env[69796]: INFO nova.compute.manager [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] [instance: 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714] Successfully reverted task state from None on failure for instance. [ 939.448016] env[69796]: ERROR oslo_messaging.rpc.server [None req-e0d14fa8-8665-4c15-bb30-7ff818a02951 tempest-VolumesAdminNegativeTest-161696083 tempest-VolumesAdminNegativeTest-161696083-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 939.448016] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 939.448016] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 939.448016] env[69796]: ERROR oslo_messaging.rpc.server yield [ 939.448016] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 939.448016] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 939.448016] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 939.448016] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 939.448016] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4d92aa08-a82a-4ce5-bc70-74b9307d60ab"}]} [ 939.448016] env[69796]: ERROR oslo_messaging.rpc.server [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 939.448455] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 939.449059] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 
939.449880] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 939.449880] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 939.450794] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 939.451737] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 939.451737] env[69796]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 939.451737] env[69796]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 939.451737] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 939.451737] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 939.451737] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 939.451737] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 939.451737] env[69796]: ERROR oslo_messaging.rpc.server [ 939.451737] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.211s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.451737] env[69796]: INFO nova.compute.claims [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 939.453964] env[69796]: DEBUG nova.compute.manager [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Build of instance 5fa540ab-5cc9-4240-8c0d-3c92743d152f was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 939.454898] env[69796]: DEBUG nova.compute.manager [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 939.455177] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Acquiring lock "refresh_cache-5fa540ab-5cc9-4240-8c0d-3c92743d152f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.455334] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Acquired lock "refresh_cache-5fa540ab-5cc9-4240-8c0d-3c92743d152f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.455500] env[69796]: DEBUG nova.network.neutron [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 939.458126] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8a8192-4c90-459e-99ef-3c057a752b9f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.468685] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fd3cc6-92ec-459f-8db9-fc241723e744 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.488601] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5a108c-7a26-445d-ab51-77956cfcf7eb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.498361] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8171cff4-5f63-44b6-b687-20681664efb3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.534967] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180560MB free_disk=0GB free_vcpus=48 pci_devices=None {{(pid=69796) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 939.535146] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.587961] env[69796]: INFO nova.compute.manager [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 
tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 0002f8b8-91e0-4868-80c7-a70bcd9fc40c] Took 1.03 seconds to deallocate network for instance. [ 939.981786] env[69796]: DEBUG nova.network.neutron [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 940.061882] env[69796]: DEBUG nova.network.neutron [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.499369] env[69796]: DEBUG nova.scheduler.client.report [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 940.514205] env[69796]: DEBUG nova.scheduler.client.report [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 940.514464] env[69796]: DEBUG nova.compute.provider_tree [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 940.525840] env[69796]: DEBUG nova.scheduler.client.report [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 940.545120] env[69796]: DEBUG nova.scheduler.client.report [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: 
HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 940.565019] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Releasing lock "refresh_cache-5fa540ab-5cc9-4240-8c0d-3c92743d152f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.565019] env[69796]: DEBUG nova.compute.manager [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 940.565019] env[69796]: DEBUG nova.compute.manager [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 940.565402] env[69796]: DEBUG nova.network.neutron [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 940.582010] env[69796]: DEBUG nova.network.neutron [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 940.615880] env[69796]: INFO nova.scheduler.client.report [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Deleted allocations for instance 0002f8b8-91e0-4868-80c7-a70bcd9fc40c [ 940.794487] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81125f6b-2a90-459f-96f8-fca2f148ea7b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.802709] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f60d3d-796f-4e3b-8c97-737bc1322f40 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.840258] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd09011a-952e-4555-86a1-38b10badcd8a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.848901] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc2ed775-a466-4b81-bd67-4e01bd048307 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.862712] env[69796]: DEBUG nova.compute.provider_tree [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 941.084667] env[69796]: DEBUG nova.network.neutron [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.125522] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2f6020a0-f48e-4976-b61d-aa8b391bfdc1 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "0002f8b8-91e0-4868-80c7-a70bcd9fc40c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.873s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.384642] env[69796]: ERROR nova.scheduler.client.report [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [req-11eb9310-306a-4e29-a775-fbf02bb5d4ce] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 
1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-11eb9310-306a-4e29-a775-fbf02bb5d4ce"}]} [ 941.385069] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.937s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.385692] env[69796]: ERROR nova.compute.manager [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 941.385692] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Traceback (most recent call last): [ 941.385692] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 941.385692] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] yield [ 941.385692] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 941.385692] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] self.set_inventory_for_provider( [ 941.385692] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 941.385692] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 941.386048] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-11eb9310-306a-4e29-a775-fbf02bb5d4ce"}]} [ 
941.386048] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] [ 941.386048] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] During handling of the above exception, another exception occurred: [ 941.386048] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] [ 941.386048] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Traceback (most recent call last): [ 941.386048] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 941.386048] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] with self.rt.instance_claim(context, instance, node, allocs, [ 941.386048] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 941.386048] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] return f(*args, **kwargs) [ 941.386404] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 941.386404] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] self._update(elevated, cn) [ 941.386404] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 941.386404] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] self._update_to_placement(context, compute_node, startup) [ 941.386404] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 941.386404] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 941.386404] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 941.386404] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] return attempt.get(self._wrap_exception) [ 941.386404] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 941.386404] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] six.reraise(self.value[0], self.value[1], self.value[2]) [ 941.386404] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 941.386404] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] raise value [ 941.386404] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 941.386787] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 941.386787] 
env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 941.386787] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] self.reportclient.update_from_provider_tree( [ 941.386787] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 941.386787] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] with catch_all(pd.uuid): [ 941.386787] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 941.386787] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] self.gen.throw(typ, value, traceback) [ 941.386787] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 941.386787] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] raise exception.ResourceProviderSyncFailed() [ 941.386787] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 941.386787] env[69796]: ERROR nova.compute.manager [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] [ 941.387107] env[69796]: DEBUG nova.compute.utils [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 941.387611] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.496s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.389477] env[69796]: INFO nova.compute.claims [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 941.392147] env[69796]: DEBUG nova.compute.manager [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Build of instance c5c38995-c5b4-457e-badf-7f2eabb203f3 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 941.392825] env[69796]: DEBUG nova.compute.manager [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 941.392825] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquiring lock "refresh_cache-c5c38995-c5b4-457e-badf-7f2eabb203f3" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.392961] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquired lock "refresh_cache-c5c38995-c5b4-457e-badf-7f2eabb203f3" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 941.393092] env[69796]: DEBUG nova.network.neutron [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 941.589054] env[69796]: INFO nova.compute.manager [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] [instance: 5fa540ab-5cc9-4240-8c0d-3c92743d152f] Took 1.02 seconds to deallocate network for instance. [ 941.913863] env[69796]: DEBUG nova.network.neutron [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 941.987029] env[69796]: DEBUG nova.network.neutron [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.416146] env[69796]: DEBUG nova.scheduler.client.report [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 942.428900] env[69796]: DEBUG nova.scheduler.client.report [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 942.429118] env[69796]: DEBUG nova.compute.provider_tree [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 942.439257] env[69796]: DEBUG nova.scheduler.client.report [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 942.453897] env[69796]: DEBUG nova.scheduler.client.report [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 942.488190] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Releasing lock 
"refresh_cache-c5c38995-c5b4-457e-badf-7f2eabb203f3" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.488410] env[69796]: DEBUG nova.compute.manager [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 942.488596] env[69796]: DEBUG nova.compute.manager [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 942.488804] env[69796]: DEBUG nova.network.neutron [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 942.505970] env[69796]: DEBUG nova.network.neutron [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 942.616163] env[69796]: INFO nova.scheduler.client.report [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Deleted allocations for instance 5fa540ab-5cc9-4240-8c0d-3c92743d152f [ 942.692911] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835848f1-97d1-4a46-a78b-9620ca22cabf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.700565] env[69796]: DEBUG oslo_concurrency.lockutils [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "7f6be89d-a63c-43c8-901a-feea613b35cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.700799] env[69796]: DEBUG oslo_concurrency.lockutils [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "7f6be89d-a63c-43c8-901a-feea613b35cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.705488] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e466513d-06b3-43a1-b6fb-096c48b57f3d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.737826] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd9dd3f-3cf1-4423-bfc7-84cb952b0956 {{(pid=69796) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.745147] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c426e4-de63-44ba-bcf9-34c7e34f89a4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.759316] env[69796]: DEBUG nova.compute.provider_tree [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 943.008352] env[69796]: DEBUG nova.network.neutron [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.126526] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ec09f2c9-c8c1-446a-9244-7a42bd55b2c5 tempest-AttachVolumeTestJSON-2145518873 tempest-AttachVolumeTestJSON-2145518873-project-member] Lock "5fa540ab-5cc9-4240-8c0d-3c92743d152f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.315s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.202984] env[69796]: DEBUG nova.compute.manager [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 943.280974] env[69796]: ERROR nova.scheduler.client.report [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [req-6679a07d-36ca-4a8f-ab7a-960e1e9efaca] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-6679a07d-36ca-4a8f-ab7a-960e1e9efaca"}]} [ 943.281373] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.894s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.282033] env[69796]: ERROR nova.compute.manager [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 943.282033] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Traceback (most recent call last): [ 943.282033] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 943.282033] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] yield [ 943.282033] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 943.282033] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] self.set_inventory_for_provider( [ 943.282033] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 943.282033] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 943.282376] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-6679a07d-36ca-4a8f-ab7a-960e1e9efaca"}]} [ 943.282376] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] [ 943.282376] env[69796]: ERROR nova.compute.manager 
[instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] During handling of the above exception, another exception occurred: [ 943.282376] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] [ 943.282376] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Traceback (most recent call last): [ 943.282376] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 943.282376] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] with self.rt.instance_claim(context, instance, node, allocs, [ 943.282376] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 943.282376] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] return f(*args, **kwargs) [ 943.282730] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 943.282730] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] self._update(elevated, cn) [ 943.282730] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 943.282730] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] self._update_to_placement(context, compute_node, startup) [ 943.282730] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 943.282730] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 943.282730] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 943.282730] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] return attempt.get(self._wrap_exception) [ 943.282730] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 943.282730] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] six.reraise(self.value[0], self.value[1], self.value[2]) [ 943.282730] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 943.282730] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] raise value [ 943.282730] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 943.283229] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 943.283229] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 943.283229] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] self.reportclient.update_from_provider_tree( [ 943.283229] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 943.283229] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] with catch_all(pd.uuid): [ 943.283229] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 943.283229] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] self.gen.throw(typ, value, traceback) [ 943.283229] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 943.283229] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] raise exception.ResourceProviderSyncFailed() [ 943.283229] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 943.283229] env[69796]: ERROR nova.compute.manager [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] [ 943.283626] env[69796]: DEBUG nova.compute.utils [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 943.283851] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.402s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.285435] env[69796]: INFO nova.compute.claims [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.289397] env[69796]: DEBUG nova.compute.manager [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Build of instance 815e394b-8c3e-4628-a2bf-2933e4cc960c was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 943.289825] env[69796]: DEBUG nova.compute.manager [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 943.290068] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Acquiring lock "refresh_cache-815e394b-8c3e-4628-a2bf-2933e4cc960c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.290223] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Acquired lock "refresh_cache-815e394b-8c3e-4628-a2bf-2933e4cc960c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.290389] env[69796]: DEBUG nova.network.neutron [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 943.512044] env[69796]: INFO nova.compute.manager [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: c5c38995-c5b4-457e-badf-7f2eabb203f3] Took 1.02 seconds to deallocate network for instance. [ 943.727827] env[69796]: DEBUG oslo_concurrency.lockutils [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.811335] env[69796]: DEBUG nova.network.neutron [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 943.930273] env[69796]: DEBUG nova.network.neutron [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.314322] env[69796]: DEBUG nova.scheduler.client.report [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 944.329755] env[69796]: DEBUG nova.scheduler.client.report [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 944.330019] env[69796]: DEBUG nova.compute.provider_tree [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 944.343261] env[69796]: DEBUG nova.scheduler.client.report [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 944.363548] env[69796]: DEBUG nova.scheduler.client.report [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 944.433051] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Releasing lock 
"refresh_cache-815e394b-8c3e-4628-a2bf-2933e4cc960c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.433375] env[69796]: DEBUG nova.compute.manager [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 944.433625] env[69796]: DEBUG nova.compute.manager [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 944.433859] env[69796]: DEBUG nova.network.neutron [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 944.454873] env[69796]: DEBUG nova.network.neutron [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 944.549553] env[69796]: INFO nova.scheduler.client.report [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Deleted allocations for instance c5c38995-c5b4-457e-badf-7f2eabb203f3 [ 944.673916] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbe0e56-c512-488e-b7e7-d609392e9d77 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.683698] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059e3a09-ca13-486d-9c38-84fa834adf32 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.716220] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860d763e-3c8e-41a1-b129-bff3923f2e44 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.727781] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17160a83-7762-4a1d-aa4e-2ad18fef2e41 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.739655] env[69796]: DEBUG nova.compute.provider_tree [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 944.958163] env[69796]: DEBUG nova.network.neutron [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.059451] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d733450f-f5b8-428d-9e3e-31bf41f538c1 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "c5c38995-c5b4-457e-badf-7f2eabb203f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.706s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.265722] env[69796]: ERROR nova.scheduler.client.report [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [req-74bb3101-fcca-434c-a9f1-9cee08161917] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-74bb3101-fcca-434c-a9f1-9cee08161917"}]} [ 945.265722] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.982s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.266374] env[69796]: ERROR nova.compute.manager [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
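The 400 responses above quote both the offending payload and the schema fragment placement validates it against: max_unit must be an integer between 1 and 2147483647, while the resource tracker keeps submitting a DISK_GB inventory with max_unit 0. A minimal sketch, outside of Nova and assuming only the jsonschema library, that reproduces the same validation failure from the values printed in the log:

```python
# Minimal sketch (not Nova/placement code) reproducing the validation seen in
# the 400 responses above, using the schema fragment quoted in the error
# detail: max_unit must be an integer in [1, 2147483647].
import jsonschema

MAX_UNIT_SCHEMA = {"type": "integer", "maximum": 2147483647, "minimum": 1}

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    # The DISK_GB entry the resource tracker tried to PUT: max_unit == 0,
    # which is what placement rejects with "0 is less than the minimum of 1".
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 0,
                "step_size": 1, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    try:
        jsonschema.validate(inv["max_unit"], MAX_UNIT_SCHEMA)
    except jsonschema.ValidationError as exc:
        print(f"{rc}: {exc.message}")  # prints: DISK_GB: 0 is less than the minimum of 1
```

Because set_inventory_for_provider raises ResourceProviderUpdateFailed on that 400, catch_all converts it to ResourceProviderSyncFailed, instance_claim fails, and the build is re-scheduled, which is the pattern repeating for each instance in this section.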
[ 945.266374] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Traceback (most recent call last): [ 945.266374] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 945.266374] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] yield [ 945.266374] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 945.266374] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] self.set_inventory_for_provider( [ 945.266374] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 945.266374] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 945.266700] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-74bb3101-fcca-434c-a9f1-9cee08161917"}]} [ 945.266700] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] [ 945.266700] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] During handling of the above exception, another exception occurred: [ 945.266700] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] [ 945.266700] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Traceback (most recent call last): [ 945.266700] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 945.266700] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] with self.rt.instance_claim(context, instance, node, allocs, [ 945.266700] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 945.266700] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] return f(*args, **kwargs) [ 945.267133] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 945.267133] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] self._update(elevated, cn) [ 945.267133] env[69796]: ERROR nova.compute.manager [instance: 
3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 945.267133] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] self._update_to_placement(context, compute_node, startup) [ 945.267133] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 945.267133] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 945.267133] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 945.267133] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] return attempt.get(self._wrap_exception) [ 945.267133] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 945.267133] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] six.reraise(self.value[0], self.value[1], self.value[2]) [ 945.267133] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 945.267133] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] raise value [ 945.267133] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 945.267812] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 945.267812] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 945.267812] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] self.reportclient.update_from_provider_tree( [ 945.267812] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 945.267812] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] with catch_all(pd.uuid): [ 945.267812] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 945.267812] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] self.gen.throw(typ, value, traceback) [ 945.267812] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 945.267812] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] raise exception.ResourceProviderSyncFailed() [ 945.267812] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
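Note the asymmetry visible in the refresh records above: the inventory read back from placement reports DISK_GB max_unit of 1, while the locally generated update keeps setting it to 0, so every PUT fails and the next claim repeats the cycle. Purely as an illustration of what the schema would accept (a hypothetical helper, not Nova code and not a proposed fix), flooring max_unit at the schema minimum makes the same payload pass:

```python
# Illustrative only: a hypothetical helper that floors max_unit at the schema
# minimum of 1 before an inventory payload is sent, so a DISK_GB entry like
# the one above would no longer trigger the 400 from placement.
def clamp_max_unit(inventories, floor=1):
    clamped = {}
    for rc, inv in inventories.items():
        inv = dict(inv)  # copy so the caller's dict is left untouched
        inv["max_unit"] = max(floor, inv.get("max_unit", floor))
        clamped[rc] = inv
    return clamped

disk_gb = {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 0,
           "step_size": 1, "allocation_ratio": 1.0}
print(clamp_max_unit({"DISK_GB": disk_gb})["DISK_GB"]["max_unit"])  # 1
```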
[ 945.267812] env[69796]: ERROR nova.compute.manager [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] [ 945.268378] env[69796]: DEBUG nova.compute.utils [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 945.268378] env[69796]: DEBUG oslo_concurrency.lockutils [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.257s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.269977] env[69796]: INFO nova.compute.claims [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 945.275421] env[69796]: DEBUG nova.compute.manager [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Build of instance 3af4d581-8476-4b47-a2e5-eb1c99deb2bb was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 945.275421] env[69796]: DEBUG nova.compute.manager [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 945.275421] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Acquiring lock "refresh_cache-3af4d581-8476-4b47-a2e5-eb1c99deb2bb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.275421] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Acquired lock "refresh_cache-3af4d581-8476-4b47-a2e5-eb1c99deb2bb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 945.275738] env[69796]: DEBUG nova.network.neutron [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 945.394444] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Acquiring lock 
"d746d66b-32df-4a4d-97bd-82b4ad364461" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.394739] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Lock "d746d66b-32df-4a4d-97bd-82b4ad364461" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.394953] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Acquiring lock "d746d66b-32df-4a4d-97bd-82b4ad364461-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.395930] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Lock "d746d66b-32df-4a4d-97bd-82b4ad364461-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.395930] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Lock "d746d66b-32df-4a4d-97bd-82b4ad364461-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.397741] env[69796]: INFO nova.compute.manager [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Terminating instance [ 945.463719] env[69796]: INFO nova.compute.manager [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] [instance: 815e394b-8c3e-4628-a2bf-2933e4cc960c] Took 1.03 seconds to deallocate network for instance. [ 945.794687] env[69796]: DEBUG nova.network.neutron [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 945.865939] env[69796]: DEBUG nova.network.neutron [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.904192] env[69796]: DEBUG nova.compute.manager [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Start destroying the instance on the hypervisor. {{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 945.904430] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 945.905320] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b125d7f6-20ee-4dbe-a6a0-3f57a674c546 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.913874] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.914129] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1612fb4-8cdd-4676-9088-12ca74029cb0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.920189] env[69796]: DEBUG oslo_vmware.api [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Waiting for the task: (returnval){ [ 945.920189] env[69796]: value = "task-4234405" [ 945.920189] env[69796]: _type = "Task" [ 945.920189] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.929015] env[69796]: DEBUG oslo_vmware.api [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234405, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.298197] env[69796]: DEBUG nova.scheduler.client.report [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 946.310379] env[69796]: DEBUG nova.scheduler.client.report [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 946.310592] env[69796]: DEBUG nova.compute.provider_tree [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Updating resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 generation from 93 to 94 during operation: update_inventory {{(pid=69796) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 946.310754] env[69796]: DEBUG nova.compute.provider_tree [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 946.321216] env[69796]: DEBUG nova.scheduler.client.report [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 946.337343] env[69796]: DEBUG nova.scheduler.client.report [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 946.369631] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 
tempest-AttachVolumeShelveTestJSON-258170-project-member] Releasing lock "refresh_cache-3af4d581-8476-4b47-a2e5-eb1c99deb2bb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.370167] env[69796]: DEBUG nova.compute.manager [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 946.370167] env[69796]: DEBUG nova.compute.manager [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 946.370332] env[69796]: DEBUG nova.network.neutron [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 946.386887] env[69796]: DEBUG nova.network.neutron [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 946.433572] env[69796]: DEBUG oslo_vmware.api [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234405, 'name': PowerOffVM_Task, 'duration_secs': 0.201365} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.433843] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 946.434039] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 946.434300] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39f448f0-ea6d-425b-a4bb-266e91dfbfb5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.490727] env[69796]: INFO nova.scheduler.client.report [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Deleted allocations for instance 815e394b-8c3e-4628-a2bf-2933e4cc960c [ 946.502671] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 946.502997] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 946.503331] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Deleting the datastore file [datastore2] d746d66b-32df-4a4d-97bd-82b4ad364461 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 946.503404] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e2269041-65ba-4ccb-9e75-e556d7316217 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.511111] env[69796]: DEBUG oslo_vmware.api [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Waiting for the task: (returnval){ [ 946.511111] env[69796]: value = "task-4234407" [ 946.511111] env[69796]: _type = "Task" [ 946.511111] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.525649] env[69796]: DEBUG oslo_vmware.api [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234407, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.608023] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55549e1-a7fc-4403-bb76-0912be733b3e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.616763] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55bcc468-d155-4107-900f-88653521aded {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.647292] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f631ac5-99a4-44ce-9c67-26d77ad3b014 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.655378] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19bd3af-0cd2-478f-84d8-948de432a6ce {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.670619] env[69796]: DEBUG nova.compute.provider_tree [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 946.769741] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquiring lock "3c939d6e-78d4-4dc0-ac3a-6d5e3c075165" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.769977] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "3c939d6e-78d4-4dc0-ac3a-6d5e3c075165" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.892056] env[69796]: DEBUG nova.network.neutron [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Updating instance_info_cache with network_info: [] {{(pid=69796) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.003173] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9c0d9aa1-03b2-464a-aab6-e0f3ca141e6a tempest-ServersNegativeTestJSON-1232781610 tempest-ServersNegativeTestJSON-1232781610-project-member] Lock "815e394b-8c3e-4628-a2bf-2933e4cc960c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.830s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.025824] env[69796]: DEBUG oslo_vmware.api [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Task: {'id': task-4234407, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154814} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.026357] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 947.026728] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 947.027074] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 947.027400] env[69796]: INFO nova.compute.manager [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Took 1.12 seconds to destroy the instance on the hypervisor. [ 947.027826] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 947.028156] env[69796]: DEBUG nova.compute.manager [-] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 947.028317] env[69796]: DEBUG nova.network.neutron [-] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 947.193910] env[69796]: ERROR nova.scheduler.client.report [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [req-31ce8daf-17ef-4828-9457-b4a111cb554f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-31ce8daf-17ef-4828-9457-b4a111cb554f"}]} [ 947.194295] env[69796]: DEBUG oslo_concurrency.lockutils [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.926s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.194942] env[69796]: ERROR nova.compute.manager [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 947.194942] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Traceback (most recent call last): [ 947.194942] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 947.194942] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] yield [ 947.194942] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 947.194942] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] self.set_inventory_for_provider( [ 947.194942] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 947.194942] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 947.195266] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-31ce8daf-17ef-4828-9457-b4a111cb554f"}]} [ 947.195266] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] [ 947.195266] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] During handling of the above exception, another exception occurred: [ 947.195266] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] [ 947.195266] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Traceback (most recent call last): [ 947.195266] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 947.195266] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] with self.rt.instance_claim(context, instance, node, allocs, [ 947.195266] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 947.195266] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] return f(*args, **kwargs) [ 947.195593] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 947.195593] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] self._update(elevated, cn) [ 947.195593] env[69796]: ERROR nova.compute.manager [instance: 
b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 947.195593] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] self._update_to_placement(context, compute_node, startup) [ 947.195593] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 947.195593] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 947.195593] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 947.195593] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] return attempt.get(self._wrap_exception) [ 947.195593] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 947.195593] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] six.reraise(self.value[0], self.value[1], self.value[2]) [ 947.195593] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 947.195593] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] raise value [ 947.195593] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 947.196043] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 947.196043] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 947.196043] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] self.reportclient.update_from_provider_tree( [ 947.196043] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 947.196043] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] with catch_all(pd.uuid): [ 947.196043] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 947.196043] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] self.gen.throw(typ, value, traceback) [ 947.196043] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 947.196043] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] raise exception.ResourceProviderSyncFailed() [ 947.196043] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 947.196043] env[69796]: ERROR nova.compute.manager [instance: b2c473de-7aee-428d-87d2-d747f12402f4] [ 947.196477] env[69796]: DEBUG nova.compute.utils [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 947.196923] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.179s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.198494] env[69796]: INFO nova.compute.claims [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 947.201216] env[69796]: DEBUG nova.compute.manager [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Build of instance b2c473de-7aee-428d-87d2-d747f12402f4 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 947.201664] env[69796]: DEBUG nova.compute.manager [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 947.201895] env[69796]: DEBUG oslo_concurrency.lockutils [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Acquiring lock "refresh_cache-b2c473de-7aee-428d-87d2-d747f12402f4" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.202069] env[69796]: DEBUG oslo_concurrency.lockutils [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Acquired lock "refresh_cache-b2c473de-7aee-428d-87d2-d747f12402f4" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.202231] env[69796]: DEBUG nova.network.neutron [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 947.273836] env[69796]: DEBUG nova.compute.manager [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 947.395079] env[69796]: INFO nova.compute.manager [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] [instance: 3af4d581-8476-4b47-a2e5-eb1c99deb2bb] Took 1.02 seconds to deallocate network for instance. [ 947.526552] env[69796]: DEBUG nova.compute.manager [req-d049c946-a1f1-406b-b241-1976fa648729 req-9eaa9ce4-128e-4ac5-a810-50ecb4f53470 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Received event network-vif-deleted-e84f8d87-5538-4cfd-ac81-c58c1b4fca74 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 947.526774] env[69796]: INFO nova.compute.manager [req-d049c946-a1f1-406b-b241-1976fa648729 req-9eaa9ce4-128e-4ac5-a810-50ecb4f53470 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Neutron deleted interface e84f8d87-5538-4cfd-ac81-c58c1b4fca74; detaching it from the instance and deleting it from the info cache [ 947.526945] env[69796]: DEBUG nova.network.neutron [req-d049c946-a1f1-406b-b241-1976fa648729 req-9eaa9ce4-128e-4ac5-a810-50ecb4f53470 service nova] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.734257] env[69796]: DEBUG nova.network.neutron [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 947.797611] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.858460] env[69796]: DEBUG nova.network.neutron [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.006451] env[69796]: DEBUG nova.network.neutron [-] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.029764] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f05af03-0311-44f2-a0f5-6c36d028e939 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.040384] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d290db7-f498-4c14-ab6a-8fe2f5bdbaac {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.064582] env[69796]: DEBUG nova.compute.manager [req-d049c946-a1f1-406b-b241-1976fa648729 req-9eaa9ce4-128e-4ac5-a810-50ecb4f53470 service nova] [instance: 
d746d66b-32df-4a4d-97bd-82b4ad364461] Detach interface failed, port_id=e84f8d87-5538-4cfd-ac81-c58c1b4fca74, reason: Instance d746d66b-32df-4a4d-97bd-82b4ad364461 could not be found. {{(pid=69796) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 948.231711] env[69796]: DEBUG nova.scheduler.client.report [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 948.249579] env[69796]: DEBUG nova.scheduler.client.report [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 948.249812] env[69796]: DEBUG nova.compute.provider_tree [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 948.261356] env[69796]: DEBUG nova.scheduler.client.report [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 948.289863] env[69796]: DEBUG nova.scheduler.client.report [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 948.361209] env[69796]: DEBUG oslo_concurrency.lockutils [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Releasing lock "refresh_cache-b2c473de-7aee-428d-87d2-d747f12402f4" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.361825] env[69796]: DEBUG nova.compute.manager [None 
req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 948.361825] env[69796]: DEBUG nova.compute.manager [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 948.361825] env[69796]: DEBUG nova.network.neutron [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 948.380968] env[69796]: DEBUG nova.network.neutron [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 948.424555] env[69796]: INFO nova.scheduler.client.report [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Deleted allocations for instance 3af4d581-8476-4b47-a2e5-eb1c99deb2bb [ 948.508979] env[69796]: INFO nova.compute.manager [-] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Took 1.48 seconds to deallocate network for instance. 
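The 400 from placement above comes down to a single schema constraint: the compute host tried to report DISK_GB inventory with max_unit = 0, while placement's inventory schema requires max_unit to be an integer of at least 1 (the inventory placement already holds, refreshed earlier in this log, shows max_unit = 1 for DISK_GB). A minimal sketch of that validation, assuming the python-jsonschema package and using only the abbreviated max_unit constraint quoted in the error text, not the full placement schema:

import jsonschema

# Abbreviated reconstruction of the constraint quoted in the 400 response above;
# the real placement schema covers every inventory field, this fragment only max_unit.
INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {"type": "integer", "minimum": 1, "maximum": 2147483647},
                    },
                },
            },
        },
    },
}

# The DISK_GB entry the compute host tried to PUT, as shown in the error above.
payload = {"inventories": {"DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1,
                                       "max_unit": 0, "step_size": 1,
                                       "allocation_ratio": 1.0}}}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    print(exc.message)  # "0 is less than the minimum of 1", matching the log

As the surrounding entries show, the resulting ResourceProviderSyncFailed is treated as a re-schedulable build failure: instance b2c473de-7aee-428d-87d2-d747f12402f4 is re-scheduled and its network and placement allocations are cleaned up in the entries that follow.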
[ 948.578046] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c15b61-4d8a-46c6-aaa3-e82a0496804e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.586634] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-389e8349-7d6a-43db-a894-fc99a59db86f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.617443] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d63d657-1da9-4154-99da-126ef239e4a5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.625539] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80af3b61-31b3-4594-a767-f60be5b03f18 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.640201] env[69796]: DEBUG nova.compute.provider_tree [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.883995] env[69796]: DEBUG nova.network.neutron [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.934550] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ba3e682f-b4fe-424d-a8af-f50dc2e34bac tempest-AttachVolumeShelveTestJSON-258170 tempest-AttachVolumeShelveTestJSON-258170-project-member] Lock "3af4d581-8476-4b47-a2e5-eb1c99deb2bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.270s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.018681] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.142597] env[69796]: DEBUG nova.scheduler.client.report [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 949.387111] env[69796]: INFO 
nova.compute.manager [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] [instance: b2c473de-7aee-428d-87d2-d747f12402f4] Took 1.03 seconds to deallocate network for instance. [ 949.648622] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.649233] env[69796]: DEBUG nova.compute.manager [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Start building networks asynchronously for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 949.651862] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.508s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.653407] env[69796]: INFO nova.compute.claims [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.157849] env[69796]: DEBUG nova.compute.utils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 950.161472] env[69796]: DEBUG nova.compute.manager [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Allocating IP information in the background. 
{{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 950.161725] env[69796]: DEBUG nova.network.neutron [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 950.440598] env[69796]: DEBUG nova.policy [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cdcf2d22c98b45ad92e219e24b285b44', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03701784af4041e29a23e885800ea39b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 950.453299] env[69796]: INFO nova.scheduler.client.report [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Deleted allocations for instance b2c473de-7aee-428d-87d2-d747f12402f4 [ 950.663888] env[69796]: DEBUG nova.compute.manager [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Start building block device mappings for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 950.938321] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86748936-f100-4134-88f0-e49a2026cfed {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.948555] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61312835-8ee6-4aa9-ba1e-a28b6f86f8fb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.953256] env[69796]: DEBUG nova.network.neutron [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Successfully created port: 640a621e-38ea-40b2-b71c-15fe3f0c1c42 {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 950.985810] env[69796]: DEBUG oslo_concurrency.lockutils [None req-667c5006-a02b-4173-90d2-cb39a7c1b976 tempest-ServerPasswordTestJSON-1602950814 tempest-ServerPasswordTestJSON-1602950814-project-member] Lock "b2c473de-7aee-428d-87d2-d747f12402f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.760s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.986857] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1fb944-8ab0-45f5-b0c2-acddb0ac9ea9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.996477] env[69796]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd80907-41de-486b-81cf-a5d0d390a258 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.012059] env[69796]: DEBUG nova.compute.provider_tree [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.515181] env[69796]: DEBUG nova.scheduler.client.report [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.678886] env[69796]: DEBUG nova.compute.manager [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Start spawning the instance on the hypervisor. {{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 951.718291] env[69796]: DEBUG nova.virt.hardware [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 951.718291] env[69796]: DEBUG nova.virt.hardware [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 951.718291] env[69796]: DEBUG nova.virt.hardware [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 951.718454] env[69796]: DEBUG nova.virt.hardware [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 
tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 951.718454] env[69796]: DEBUG nova.virt.hardware [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 951.718454] env[69796]: DEBUG nova.virt.hardware [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 951.718454] env[69796]: DEBUG nova.virt.hardware [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 951.718454] env[69796]: DEBUG nova.virt.hardware [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 951.718703] env[69796]: DEBUG nova.virt.hardware [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 951.719162] env[69796]: DEBUG nova.virt.hardware [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 951.719508] env[69796]: DEBUG nova.virt.hardware [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 951.720715] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c2d78c-27d4-49fe-8f6a-94881e1532db {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.730450] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-234f0f13-0ba9-4f4f-bbd8-436355aa0d5f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.020756] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.369s {{(pid=69796) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.021348] env[69796]: DEBUG nova.compute.manager [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Start building networks asynchronously for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 952.024236] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.813s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.025822] env[69796]: INFO nova.compute.claims [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.530858] env[69796]: DEBUG nova.compute.utils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 952.534582] env[69796]: DEBUG nova.compute.manager [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Allocating IP information in the background. 
{{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 952.534787] env[69796]: DEBUG nova.network.neutron [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 952.596639] env[69796]: DEBUG nova.policy [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e252b635ac624e7da1412444438025dd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '952c728c1bee4cefb057c7b71efe1344', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 952.602361] env[69796]: DEBUG nova.compute.manager [req-7ca82504-4a1e-4b85-b007-e67b02f7e74e req-b3c6830b-d50e-40ec-aa7f-6245b4334fac service nova] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Received event network-vif-plugged-640a621e-38ea-40b2-b71c-15fe3f0c1c42 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 952.602854] env[69796]: DEBUG oslo_concurrency.lockutils [req-7ca82504-4a1e-4b85-b007-e67b02f7e74e req-b3c6830b-d50e-40ec-aa7f-6245b4334fac service nova] Acquiring lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.603721] env[69796]: DEBUG oslo_concurrency.lockutils [req-7ca82504-4a1e-4b85-b007-e67b02f7e74e req-b3c6830b-d50e-40ec-aa7f-6245b4334fac service nova] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.603968] env[69796]: DEBUG oslo_concurrency.lockutils [req-7ca82504-4a1e-4b85-b007-e67b02f7e74e req-b3c6830b-d50e-40ec-aa7f-6245b4334fac service nova] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.604192] env[69796]: DEBUG nova.compute.manager [req-7ca82504-4a1e-4b85-b007-e67b02f7e74e req-b3c6830b-d50e-40ec-aa7f-6245b4334fac service nova] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] No waiting events found dispatching network-vif-plugged-640a621e-38ea-40b2-b71c-15fe3f0c1c42 {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 952.604422] env[69796]: WARNING nova.compute.manager [req-7ca82504-4a1e-4b85-b007-e67b02f7e74e req-b3c6830b-d50e-40ec-aa7f-6245b4334fac service nova] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Received unexpected event network-vif-plugged-640a621e-38ea-40b2-b71c-15fe3f0c1c42 for instance with vm_state building and task_state spawning. 
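The CPU-topology walk for instance d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 above (no explicit flavor or image preference, 0:0:0, with limits of 65536 sockets/cores/threads) reduces to enumerating the (sockets, cores, threads) factorizations of the flavor's single vCPU, which is why exactly one topology, VirtCPUTopology(cores=1,sockets=1,threads=1), comes back. A rough standalone illustration of that counting step, not Nova's actual nova.virt.hardware implementation:

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus.

    Illustrative only: the real code also applies flavor/image preferences and
    ordering, but the enumeration under the default limits is the same idea.
    """
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append((sockets, cores, threads))
    return found

print(possible_topologies(1))  # [(1, 1, 1)] -- one possible topology, as in the log

For the 1-vCPU m1.nano flavor used by these tempest instances the only factorization is 1:1:1, matching "Got 1 possible topologies" in the hardware entries above.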
[ 952.775564] env[69796]: DEBUG nova.network.neutron [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Successfully updated port: 640a621e-38ea-40b2-b71c-15fe3f0c1c42 {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 952.951563] env[69796]: DEBUG nova.network.neutron [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Successfully created port: 76652ff3-72d0-410c-abd6-d0e0e4bfcdc7 {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 953.035424] env[69796]: DEBUG nova.compute.manager [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Start building block device mappings for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 953.282858] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "refresh_cache-d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.283059] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "refresh_cache-d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.283255] env[69796]: DEBUG nova.network.neutron [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 953.292144] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cd1f95-2c0d-46a0-b374-ae0ff748ff99 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.301729] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b17ac5-637c-413f-9256-9d1c8d5db199 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.334533] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63edafed-561a-4054-838b-fee73e7cf187 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.342788] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e896463-4da5-41bc-a2b2-5809158680f5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.357733] env[69796]: DEBUG nova.compute.provider_tree [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 
tempest-AttachVolumeNegativeTest-1688662533-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.816054] env[69796]: DEBUG nova.network.neutron [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.867015] env[69796]: DEBUG nova.scheduler.client.report [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 953.957192] env[69796]: DEBUG nova.network.neutron [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Updating instance_info_cache with network_info: [{"id": "640a621e-38ea-40b2-b71c-15fe3f0c1c42", "address": "fa:16:3e:5b:65:e0", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap640a621e-38", "ovs_interfaceid": "640a621e-38ea-40b2-b71c-15fe3f0c1c42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.958607] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Acquiring lock "f9460639-e09c-4c4f-a0e1-a518730368bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.958912] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 
tempest-ServerShowV254Test-643309847-project-member] Lock "f9460639-e09c-4c4f-a0e1-a518730368bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.051807] env[69796]: DEBUG nova.compute.manager [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Start spawning the instance on the hypervisor. {{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 954.079126] env[69796]: DEBUG nova.virt.hardware [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 954.079385] env[69796]: DEBUG nova.virt.hardware [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.079546] env[69796]: DEBUG nova.virt.hardware [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 954.079732] env[69796]: DEBUG nova.virt.hardware [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.079878] env[69796]: DEBUG nova.virt.hardware [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 954.080037] env[69796]: DEBUG nova.virt.hardware [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 954.080259] env[69796]: DEBUG nova.virt.hardware [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 
tempest-ServersTestFqdnHostnames-265094891-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 954.080422] env[69796]: DEBUG nova.virt.hardware [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 954.080589] env[69796]: DEBUG nova.virt.hardware [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 954.080752] env[69796]: DEBUG nova.virt.hardware [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 954.080923] env[69796]: DEBUG nova.virt.hardware [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 954.081835] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b19e59-1ac2-43b4-8c80-5355db9427b2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.091119] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50768577-f788-4009-ab54-618e2cbf96d6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.372585] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.373132] env[69796]: DEBUG nova.compute.manager [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Start building networks asynchronously for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 954.375882] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.221s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.377668] env[69796]: INFO nova.compute.claims [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 954.461249] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "refresh_cache-d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.461625] env[69796]: DEBUG nova.compute.manager [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Instance network_info: |[{"id": "640a621e-38ea-40b2-b71c-15fe3f0c1c42", "address": "fa:16:3e:5b:65:e0", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap640a621e-38", "ovs_interfaceid": "640a621e-38ea-40b2-b71c-15fe3f0c1c42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 954.463382] env[69796]: DEBUG nova.compute.manager [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 954.464599] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:65:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20e3f794-c7a3-4696-9488-ecf34c570ef9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '640a621e-38ea-40b2-b71c-15fe3f0c1c42', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 954.472267] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Creating folder: Project (03701784af4041e29a23e885800ea39b). Parent ref: group-v837766. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 954.473171] env[69796]: DEBUG nova.network.neutron [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Successfully updated port: 76652ff3-72d0-410c-abd6-d0e0e4bfcdc7 {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 954.475534] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75c16979-f742-48de-9c97-e467c98b7179 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.490160] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Created folder: Project (03701784af4041e29a23e885800ea39b) in parent group-v837766. [ 954.490273] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Creating folder: Instances. Parent ref: group-v837817. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 954.490497] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7ad9ec1-df79-4ed7-aebb-9651ba6cc5b7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.501152] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Created folder: Instances in parent group-v837817. [ 954.501399] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 954.502220] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 954.502438] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95011d6c-3d26-4423-8dc9-5c3ba7f7af14 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.521800] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 954.521800] env[69796]: value = "task-4234410" [ 954.521800] env[69796]: _type = "Task" [ 954.521800] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.530060] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234410, 'name': CreateVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.633635] env[69796]: DEBUG nova.compute.manager [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Received event network-changed-640a621e-38ea-40b2-b71c-15fe3f0c1c42 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 954.634044] env[69796]: DEBUG nova.compute.manager [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Refreshing instance network info cache due to event network-changed-640a621e-38ea-40b2-b71c-15fe3f0c1c42. {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 954.634044] env[69796]: DEBUG oslo_concurrency.lockutils [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] Acquiring lock "refresh_cache-d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.634257] env[69796]: DEBUG oslo_concurrency.lockutils [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] Acquired lock "refresh_cache-d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.634468] env[69796]: DEBUG nova.network.neutron [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Refreshing network info cache for port 640a621e-38ea-40b2-b71c-15fe3f0c1c42 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.882533] env[69796]: DEBUG nova.compute.utils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 954.886054] env[69796]: DEBUG nova.compute.manager [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Allocating IP information in the background. 
{{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 954.886271] env[69796]: DEBUG nova.network.neutron [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 954.922621] env[69796]: DEBUG nova.policy [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0caee67119e94ddb972fe671958a4aef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a013be517fea4fe59a57059de0fbeff7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 954.978580] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Acquiring lock "refresh_cache-e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.978580] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Acquired lock "refresh_cache-e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.978580] env[69796]: DEBUG nova.network.neutron [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 954.996392] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.032837] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234410, 'name': CreateVM_Task, 'duration_secs': 0.309713} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.033074] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 955.033926] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.034145] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.034540] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 955.034853] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e71f5bc0-78bb-4079-94b6-cef9793b05e5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.040353] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 955.040353] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]520b046e-baf7-2b9e-51ce-24cb5c7db71e" [ 955.040353] env[69796]: _type = "Task" [ 955.040353] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.048786] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]520b046e-baf7-2b9e-51ce-24cb5c7db71e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.291657] env[69796]: DEBUG nova.network.neutron [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Successfully created port: 84c5a39e-6ab2-4353-8648-eb3fc939be20 {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 955.386923] env[69796]: DEBUG nova.compute.manager [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Start building block device mappings for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 955.464051] env[69796]: DEBUG nova.network.neutron [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Updated VIF entry in instance network info cache for port 640a621e-38ea-40b2-b71c-15fe3f0c1c42. {{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 955.464404] env[69796]: DEBUG nova.network.neutron [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Updating instance_info_cache with network_info: [{"id": "640a621e-38ea-40b2-b71c-15fe3f0c1c42", "address": "fa:16:3e:5b:65:e0", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap640a621e-38", "ovs_interfaceid": "640a621e-38ea-40b2-b71c-15fe3f0c1c42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.529168] env[69796]: DEBUG nova.network.neutron [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 955.557955] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]520b046e-baf7-2b9e-51ce-24cb5c7db71e, 'name': SearchDatastore_Task, 'duration_secs': 0.013388} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.560724] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.560724] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 955.560885] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.561039] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.561328] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 955.561537] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82f78408-0c30-4557-b156-6fd4931dca70 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.573904] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 955.574144] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 955.574951] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24994857-a7e7-4316-ba4a-5aa67c054ed9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.582172] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 955.582172] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f8b9b5-db59-3a29-3712-bbee89eb4c16" [ 955.582172] env[69796]: _type = "Task" [ 955.582172] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.591099] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f8b9b5-db59-3a29-3712-bbee89eb4c16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.672315] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7936fdb-f57a-443b-aa8c-c8541b3f3fd0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.680326] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd28b90-c284-40b6-960c-796674e1a7ca {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.713134] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c08384-1dbf-4d0f-a9f2-dfa5a5d1c344 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.721378] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4abd0d60-5754-482d-9ee0-63ee3208e4c6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.736104] env[69796]: DEBUG nova.compute.provider_tree [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.746580] env[69796]: DEBUG nova.network.neutron [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Updating instance_info_cache with network_info: [{"id": "76652ff3-72d0-410c-abd6-d0e0e4bfcdc7", "address": "fa:16:3e:ca:81:43", "network": {"id": "e736eb9c-66d5-48db-ab5d-4525bafef1d8", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-973661727-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "952c728c1bee4cefb057c7b71efe1344", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76652ff3-72", "ovs_interfaceid": "76652ff3-72d0-410c-abd6-d0e0e4bfcdc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.967324] env[69796]: DEBUG oslo_concurrency.lockutils [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] Releasing lock "refresh_cache-d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.967712] env[69796]: DEBUG nova.compute.manager [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Received event network-vif-plugged-76652ff3-72d0-410c-abd6-d0e0e4bfcdc7 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 955.967807] env[69796]: DEBUG oslo_concurrency.lockutils [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] Acquiring lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.968048] env[69796]: DEBUG oslo_concurrency.lockutils [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] Lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.968226] env[69796]: DEBUG oslo_concurrency.lockutils [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] Lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.968397] env[69796]: DEBUG nova.compute.manager [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] No waiting events found dispatching network-vif-plugged-76652ff3-72d0-410c-abd6-d0e0e4bfcdc7 {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 955.968605] env[69796]: WARNING nova.compute.manager [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Received unexpected event network-vif-plugged-76652ff3-72d0-410c-abd6-d0e0e4bfcdc7 for instance with vm_state building and task_state spawning. 
[ 955.968803] env[69796]: DEBUG nova.compute.manager [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Received event network-changed-76652ff3-72d0-410c-abd6-d0e0e4bfcdc7 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 955.968959] env[69796]: DEBUG nova.compute.manager [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Refreshing instance network info cache due to event network-changed-76652ff3-72d0-410c-abd6-d0e0e4bfcdc7. {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 955.969149] env[69796]: DEBUG oslo_concurrency.lockutils [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] Acquiring lock "refresh_cache-e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.093102] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f8b9b5-db59-3a29-3712-bbee89eb4c16, 'name': SearchDatastore_Task, 'duration_secs': 0.009847} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.093938] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27c15e87-1b98-4286-a4fc-9b22b7901e39 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.099699] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 956.099699] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]525a7fc9-847c-b2a6-8a4b-dbd4951269a7" [ 956.099699] env[69796]: _type = "Task" [ 956.099699] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.110064] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]525a7fc9-847c-b2a6-8a4b-dbd4951269a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.240117] env[69796]: DEBUG nova.scheduler.client.report [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 956.248780] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Releasing lock "refresh_cache-e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.249141] env[69796]: DEBUG nova.compute.manager [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Instance network_info: |[{"id": "76652ff3-72d0-410c-abd6-d0e0e4bfcdc7", "address": "fa:16:3e:ca:81:43", "network": {"id": "e736eb9c-66d5-48db-ab5d-4525bafef1d8", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-973661727-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "952c728c1bee4cefb057c7b71efe1344", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76652ff3-72", "ovs_interfaceid": "76652ff3-72d0-410c-abd6-d0e0e4bfcdc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 956.249482] env[69796]: DEBUG oslo_concurrency.lockutils [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] Acquired lock "refresh_cache-e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.249699] env[69796]: DEBUG nova.network.neutron [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Refreshing network info cache for port 76652ff3-72d0-410c-abd6-d0e0e4bfcdc7 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 956.250842] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None 
req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:81:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b29df12-5674-476d-a9e5-5e20f704d224', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76652ff3-72d0-410c-abd6-d0e0e4bfcdc7', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 956.258538] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Creating folder: Project (952c728c1bee4cefb057c7b71efe1344). Parent ref: group-v837766. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 956.261971] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-808d0afe-0b01-4b10-aba8-f8491895d22d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.272924] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Created folder: Project (952c728c1bee4cefb057c7b71efe1344) in parent group-v837766. [ 956.273147] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Creating folder: Instances. Parent ref: group-v837820. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 956.273415] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88d7e554-faee-4f28-be81-e1e80e9b231e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.283878] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Created folder: Instances in parent group-v837820. [ 956.284053] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 956.284238] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 956.284486] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01f6a7a2-8d4e-46a5-9c16-b65481d8fda0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.305331] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 956.305331] env[69796]: value = "task-4234413" [ 956.305331] env[69796]: _type = "Task" [ 956.305331] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.313722] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234413, 'name': CreateVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.400286] env[69796]: DEBUG nova.compute.manager [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Start spawning the instance on the hypervisor. {{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 956.428440] env[69796]: DEBUG nova.virt.hardware [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 956.428707] env[69796]: DEBUG nova.virt.hardware [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 956.428909] env[69796]: DEBUG nova.virt.hardware [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 956.430051] env[69796]: DEBUG nova.virt.hardware [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 956.430051] env[69796]: DEBUG nova.virt.hardware [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 956.430051] env[69796]: DEBUG nova.virt.hardware [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 956.430051] env[69796]: DEBUG nova.virt.hardware [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 
tempest-AttachVolumeNegativeTest-1688662533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 956.430051] env[69796]: DEBUG nova.virt.hardware [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 956.430344] env[69796]: DEBUG nova.virt.hardware [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 956.430344] env[69796]: DEBUG nova.virt.hardware [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 956.430344] env[69796]: DEBUG nova.virt.hardware [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 956.431151] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76038b5-4ada-44f3-b273-4b2c0c078e9a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.439745] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-346289c5-9173-404f-864f-9aed88baf67b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.494922] env[69796]: DEBUG nova.network.neutron [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Updated VIF entry in instance network info cache for port 76652ff3-72d0-410c-abd6-d0e0e4bfcdc7. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 956.495402] env[69796]: DEBUG nova.network.neutron [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Updating instance_info_cache with network_info: [{"id": "76652ff3-72d0-410c-abd6-d0e0e4bfcdc7", "address": "fa:16:3e:ca:81:43", "network": {"id": "e736eb9c-66d5-48db-ab5d-4525bafef1d8", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-973661727-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "952c728c1bee4cefb057c7b71efe1344", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76652ff3-72", "ovs_interfaceid": "76652ff3-72d0-410c-abd6-d0e0e4bfcdc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.611032] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]525a7fc9-847c-b2a6-8a4b-dbd4951269a7, 'name': SearchDatastore_Task, 'duration_secs': 0.010102} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.611296] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.611466] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7/d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 956.611637] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-25be3838-4b52-4637-875a-d4ea99e9b9e3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.618460] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 956.618460] env[69796]: value = "task-4234414" [ 956.618460] env[69796]: _type = "Task" [ 956.618460] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.626781] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234414, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.746889] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.746889] env[69796]: DEBUG nova.compute.manager [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Start building networks asynchronously for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 956.749685] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.214s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.816911] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234413, 'name': CreateVM_Task, 'duration_secs': 0.296287} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.817126] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 956.817896] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.818071] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.818403] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 956.818680] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e1d5977-807a-46c0-9c0f-b52edb154919 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.826826] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Waiting for the task: (returnval){ [ 956.826826] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52cfe9aa-e524-f8b9-ac1a-cd9bcd9f4e53" [ 956.826826] env[69796]: _type = "Task" [ 956.826826] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.836467] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52cfe9aa-e524-f8b9-ac1a-cd9bcd9f4e53, 'name': SearchDatastore_Task} progress is 0%. 
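Editor's note: the "Acquiring lock / Acquired lock / Releasing lock" triples around the image-cache path and the "compute_resources" claim come from oslo.concurrency's lockutils. A short hedged sketch of the two usage patterns involved; the lock names are taken from the log, the guarded bodies are placeholders:

```python
from oslo_concurrency import lockutils


# Context-manager form: serialize work on one cached image path, as with the
# "[datastore2] devstack-image-cache_base/<image>" lock above.
def fetch_image_if_missing(image_id):
    with lockutils.lock(f"[datastore2] devstack-image-cache_base/{image_id}"):
        # ... check the datastore and copy the VMDK only if it is absent ...
        pass


# Decorator form: the "compute_resources" lock the resource tracker holds
# while claiming resources for a new instance (held 2.370s in the log).
@lockutils.synchronized("compute_resources")
def instance_claim(instance):
    # ... update per-host resource accounting under the lock ...
    pass
```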
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.869223] env[69796]: DEBUG nova.compute.manager [req-505517c5-b779-4669-bc1a-cbd4106b7833 req-fe58e88e-fab6-4d1d-8e0d-f61a8369439c service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Received event network-vif-plugged-84c5a39e-6ab2-4353-8648-eb3fc939be20 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 956.869564] env[69796]: DEBUG oslo_concurrency.lockutils [req-505517c5-b779-4669-bc1a-cbd4106b7833 req-fe58e88e-fab6-4d1d-8e0d-f61a8369439c service nova] Acquiring lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.869829] env[69796]: DEBUG oslo_concurrency.lockutils [req-505517c5-b779-4669-bc1a-cbd4106b7833 req-fe58e88e-fab6-4d1d-8e0d-f61a8369439c service nova] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.870012] env[69796]: DEBUG oslo_concurrency.lockutils [req-505517c5-b779-4669-bc1a-cbd4106b7833 req-fe58e88e-fab6-4d1d-8e0d-f61a8369439c service nova] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.870188] env[69796]: DEBUG nova.compute.manager [req-505517c5-b779-4669-bc1a-cbd4106b7833 req-fe58e88e-fab6-4d1d-8e0d-f61a8369439c service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] No waiting events found dispatching network-vif-plugged-84c5a39e-6ab2-4353-8648-eb3fc939be20 {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 956.870352] env[69796]: WARNING nova.compute.manager [req-505517c5-b779-4669-bc1a-cbd4106b7833 req-fe58e88e-fab6-4d1d-8e0d-f61a8369439c service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Received unexpected event network-vif-plugged-84c5a39e-6ab2-4353-8648-eb3fc939be20 for instance with vm_state building and task_state spawning. [ 956.959293] env[69796]: DEBUG nova.network.neutron [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Successfully updated port: 84c5a39e-6ab2-4353-8648-eb3fc939be20 {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 956.998198] env[69796]: DEBUG oslo_concurrency.lockutils [req-69b0a239-060c-4827-8296-51a0569256c4 req-e1ee9eea-c2d6-41ca-aa92-ea1bbeae8ac8 service nova] Releasing lock "refresh_cache-e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.133218] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234414, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.253599] env[69796]: DEBUG nova.compute.utils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 957.261303] env[69796]: DEBUG nova.compute.manager [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Allocating IP information in the background. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 957.261506] env[69796]: DEBUG nova.network.neutron [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 957.299645] env[69796]: DEBUG nova.policy [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '110ad71872a445e5bc422bb8392afef1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a47349b96df54bbb870c223414206b63', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 957.338916] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52cfe9aa-e524-f8b9-ac1a-cd9bcd9f4e53, 'name': SearchDatastore_Task, 'duration_secs': 0.056904} completed successfully. 
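Editor's note: the "Policy check for network:attach_external_network failed" line shows a member-role credential being evaluated against an admin-only rule during port allocation. A stand-alone oslo.policy sketch of that kind of check; the 'is_admin:True' default and the bare Enforcer wiring are illustrative, not Nova's actual policy plumbing:

```python
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault("network:attach_external_network", "is_admin:True",
                       description="Illustrative admin-only default."))

# Credentials shaped like the dict in the DEBUG line above.
creds = {"is_admin": False, "roles": ["reader", "member"],
         "project_id": "a47349b96df54bbb870c223414206b63",
         "user_id": "110ad71872a445e5bc422bb8392afef1"}

# Returns False for a plain member, so the request proceeds without the
# external-network attachment, matching the logged policy failure.
allowed = enforcer.enforce("network:attach_external_network", {}, creds)
print(allowed)
```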
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.338916] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.338916] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 957.338916] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.339292] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.339292] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 957.339403] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6148bc08-9af9-442a-896a-31596584b0e1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.348376] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 957.348569] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 957.349332] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fb7396e-300b-4424-91f1-2b1f9f3ecfd6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.354769] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Waiting for the task: (returnval){ [ 957.354769] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52d56a94-bd05-5f96-bec8-6c9ebf83e19f" [ 957.354769] env[69796]: _type = "Task" [ 957.354769] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.363319] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52d56a94-bd05-5f96-bec8-6c9ebf83e19f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.463237] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "refresh_cache-78da661c-9020-40d1-b2e7-bc844c0bdbb0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.463493] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquired lock "refresh_cache-78da661c-9020-40d1-b2e7-bc844c0bdbb0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.463627] env[69796]: DEBUG nova.network.neutron [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 957.578739] env[69796]: DEBUG nova.network.neutron [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Successfully created port: a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 957.630355] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234414, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534611} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.630656] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7/d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 957.630883] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 957.631195] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5af4d385-9a0e-47d6-800b-3f6e53486f03 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.640173] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 957.640173] env[69796]: value = "task-4234415" [ 957.640173] env[69796]: _type = "Task" [ 957.640173] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.651975] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234415, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.764295] env[69796]: DEBUG nova.compute.manager [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Start building block device mappings for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 957.868404] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52d56a94-bd05-5f96-bec8-6c9ebf83e19f, 'name': SearchDatastore_Task, 'duration_secs': 0.009827} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.868404] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42fcb8cf-bcaf-4938-9217-b79b184c5524 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.873036] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Waiting for the task: (returnval){ [ 957.873036] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5238670f-e9f7-0e5c-769f-bacbe0101801" [ 957.873036] env[69796]: _type = "Task" [ 957.873036] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.881244] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5238670f-e9f7-0e5c-769f-bacbe0101801, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.998234] env[69796]: DEBUG nova.network.neutron [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 958.125825] env[69796]: DEBUG nova.network.neutron [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Updating instance_info_cache with network_info: [{"id": "84c5a39e-6ab2-4353-8648-eb3fc939be20", "address": "fa:16:3e:91:76:c5", "network": {"id": "cf63b262-bc3f-4dd1-bcad-ba359092ff79", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1600225792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a013be517fea4fe59a57059de0fbeff7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84c5a39e-6a", "ovs_interfaceid": "84c5a39e-6ab2-4353-8648-eb3fc939be20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.150521] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234415, 'name': 
ExtendVirtualDisk_Task, 'duration_secs': 0.103151} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.150809] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 958.151701] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d987790b-fb27-4668-8191-905906ace78a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.175909] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7/d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.176259] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ff374ed-8df3-4699-aee6-a93a6835adde {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.197181] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 958.197181] env[69796]: value = "task-4234416" [ 958.197181] env[69796]: _type = "Task" [ 958.197181] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.206739] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234416, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.299486] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47f223c0-12b0-4eda-ab42-81fe8b95afac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.299754] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 38792225-b054-4c08-b3ec-51d46287b0f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.299945] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47005af8-11fe-498f-9b67-e0316faeeb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.300109] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 836605ee-50cb-48b0-ba2e-33db3832f8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.300242] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance a4a16667-cd00-4850-9389-0bd57c7efd74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.300392] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 7f37f6c9-adba-4292-9d47-c455f77e539f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.300522] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 8b103adc-9903-406f-8fd1-e193e00cde11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.300640] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.300794] env[69796]: WARNING nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d746d66b-32df-4a4d-97bd-82b4ad364461 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 958.300912] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 3020e505-513b-4b29-996a-6e70a212f508 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.301039] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance f0d4f167-344a-4828-9f6e-8a62ed8e064d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.301187] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.301307] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.301443] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 78da661c-9020-40d1-b2e7-bc844c0bdbb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.301550] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 983f57b3-3bfb-41ce-a924-d48c72d25c9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 958.384427] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5238670f-e9f7-0e5c-769f-bacbe0101801, 'name': SearchDatastore_Task, 'duration_secs': 0.0102} completed successfully. 
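Editor's note: the resource-tracker lines above and below sort placement allocations against this node into three buckets: instances actively managed here (allocations kept), instances scheduled here but not yet started (heal skipped), and allocations that reference the node but match no tracked instance (warned about). A simplified sketch of that classification with hypothetical helper inputs:

```python
def classify_allocations(allocations, tracked_instances, scheduled_instances):
    """Mirror the three outcomes in the _remove_deleted_instances_allocations lines.

    allocations: dict of instance_uuid -> {'resources': {...}} from placement
    tracked_instances: uuids actively managed on this compute host
    scheduled_instances: uuids scheduled here but not yet started
    """
    for uuid, alloc in allocations.items():
        if uuid in tracked_instances:
            print(f"Instance {uuid} actively managed, allocations kept: {alloc}")
        elif uuid in scheduled_instances:
            print(f"Instance {uuid} scheduled but not started, skipping heal: {alloc}")
        else:
            print(f"WARNING: {uuid} not managed by this host but has allocations "
                  f"{alloc}; skipping heal because we do not know what to do.")
```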
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.384820] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.385133] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7/e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 958.385487] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e975df9b-ac3c-4219-b515-37843a476d9e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.393450] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Waiting for the task: (returnval){ [ 958.393450] env[69796]: value = "task-4234417" [ 958.393450] env[69796]: _type = "Task" [ 958.393450] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.401637] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234417, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.628423] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Releasing lock "refresh_cache-78da661c-9020-40d1-b2e7-bc844c0bdbb0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.628826] env[69796]: DEBUG nova.compute.manager [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Instance network_info: |[{"id": "84c5a39e-6ab2-4353-8648-eb3fc939be20", "address": "fa:16:3e:91:76:c5", "network": {"id": "cf63b262-bc3f-4dd1-bcad-ba359092ff79", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1600225792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a013be517fea4fe59a57059de0fbeff7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84c5a39e-6a", "ovs_interfaceid": "84c5a39e-6ab2-4353-8648-eb3fc939be20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 958.629352] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:76:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d62c1cf-f39a-4626-9552-f1e13c692636', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '84c5a39e-6ab2-4353-8648-eb3fc939be20', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 958.636938] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Creating folder: Project (a013be517fea4fe59a57059de0fbeff7). Parent ref: group-v837766. 
{{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 958.637284] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a17b299d-f78e-4856-bfb5-b7a31d738758 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.649605] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Created folder: Project (a013be517fea4fe59a57059de0fbeff7) in parent group-v837766. [ 958.649976] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Creating folder: Instances. Parent ref: group-v837823. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 958.650330] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a85124b1-d753-4eb2-b920-1e5103c29ef6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.663819] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Created folder: Instances in parent group-v837823. [ 958.664052] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 958.664245] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 958.664465] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7388ab09-423e-40a8-bb8b-774b2c747699 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.687780] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.687780] env[69796]: value = "task-4234420" [ 958.687780] env[69796]: _type = "Task" [ 958.687780] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.698491] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234420, 'name': CreateVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.708432] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234416, 'name': ReconfigVM_Task, 'duration_secs': 0.302572} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.709112] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Reconfigured VM instance instance-00000053 to attach disk [datastore2] d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7/d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.709398] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b84e76d-bb76-4d36-8851-23d47b8017c0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.720267] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 958.720267] env[69796]: value = "task-4234421" [ 958.720267] env[69796]: _type = "Task" [ 958.720267] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.732600] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234421, 'name': Rename_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.780286] env[69796]: DEBUG nova.compute.manager [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Start spawning the instance on the hypervisor. {{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 958.804429] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 7f6be89d-a63c-43c8-901a-feea613b35cf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 958.813538] env[69796]: DEBUG nova.virt.hardware [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 958.813817] env[69796]: DEBUG nova.virt.hardware [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.813981] env[69796]: DEBUG nova.virt.hardware [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 958.814263] env[69796]: DEBUG nova.virt.hardware [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.814389] env[69796]: DEBUG nova.virt.hardware [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 958.814626] env[69796]: DEBUG nova.virt.hardware [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 958.815922] env[69796]: DEBUG nova.virt.hardware [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 958.815922] env[69796]: DEBUG nova.virt.hardware [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 958.815922] env[69796]: DEBUG nova.virt.hardware [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 958.815922] env[69796]: DEBUG nova.virt.hardware [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 958.815922] env[69796]: DEBUG nova.virt.hardware [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 958.817144] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c776ec71-d415-4d7d-b65f-dffa30025bf3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.828313] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c22445-8929-4136-814e-919ac579472f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.904520] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234417, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.973405] env[69796]: DEBUG nova.compute.manager [req-d4baaa21-3600-438a-9efa-fc4acd29200d req-310fca26-1969-41a1-91e0-1c34f614c064 service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Received event network-changed-84c5a39e-6ab2-4353-8648-eb3fc939be20 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 958.973521] env[69796]: DEBUG nova.compute.manager [req-d4baaa21-3600-438a-9efa-fc4acd29200d req-310fca26-1969-41a1-91e0-1c34f614c064 service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Refreshing instance network info cache due to event network-changed-84c5a39e-6ab2-4353-8648-eb3fc939be20. 
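Editor's note: the hardware.py lines above walk through CPU topology selection for the m1.nano flavor: with 1 vCPU and no flavor or image limits, the only candidate is sockets=1, cores=1, threads=1. A simplified enumeration in the same spirit (Nova's real _get_possible_cpu_topologies applies more constraints than this):

```python
import itertools


def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Return (sockets, cores, threads) triples whose product equals vcpus."""
    topologies = []
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            topologies.append((sockets, cores, threads))
    return topologies


print(possible_cpu_topologies(1))   # [(1, 1, 1)] -- matches "Got 1 possible topologies"
print(possible_cpu_topologies(4))   # (1, 1, 4), (1, 2, 2), (1, 4, 1), (2, 1, 2), ...
```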
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 958.973843] env[69796]: DEBUG oslo_concurrency.lockutils [req-d4baaa21-3600-438a-9efa-fc4acd29200d req-310fca26-1969-41a1-91e0-1c34f614c064 service nova] Acquiring lock "refresh_cache-78da661c-9020-40d1-b2e7-bc844c0bdbb0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.974101] env[69796]: DEBUG oslo_concurrency.lockutils [req-d4baaa21-3600-438a-9efa-fc4acd29200d req-310fca26-1969-41a1-91e0-1c34f614c064 service nova] Acquired lock "refresh_cache-78da661c-9020-40d1-b2e7-bc844c0bdbb0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.974561] env[69796]: DEBUG nova.network.neutron [req-d4baaa21-3600-438a-9efa-fc4acd29200d req-310fca26-1969-41a1-91e0-1c34f614c064 service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Refreshing network info cache for port 84c5a39e-6ab2-4353-8648-eb3fc939be20 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 959.103294] env[69796]: DEBUG nova.network.neutron [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Successfully updated port: a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 959.198719] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234420, 'name': CreateVM_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.230923] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234421, 'name': Rename_Task, 'duration_secs': 0.25923} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.231243] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 959.231563] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc79b491-33dd-4dd4-8540-ea6e4b054653 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.239867] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 959.239867] env[69796]: value = "task-4234422" [ 959.239867] env[69796]: _type = "Task" [ 959.239867] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.250160] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234422, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.309655] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 959.404405] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234417, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542104} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.404662] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7/e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 959.404878] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 959.405227] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-481aba8f-c023-48d0-b321-212ea349bf27 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.412483] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Waiting for the task: (returnval){ [ 959.412483] env[69796]: value = "task-4234423" [ 959.412483] env[69796]: _type = "Task" [ 959.412483] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.422295] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234423, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.606924] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquiring lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.607141] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquired lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.607463] env[69796]: DEBUG nova.network.neutron [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 959.705253] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234420, 'name': CreateVM_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.750369] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234422, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.812516] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance f9460639-e09c-4c4f-a0e1-a518730368bb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 959.812837] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 959.813010] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3200MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 959.875683] env[69796]: DEBUG nova.network.neutron [req-d4baaa21-3600-438a-9efa-fc4acd29200d req-310fca26-1969-41a1-91e0-1c34f614c064 service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Updated VIF entry in instance network info cache for port 84c5a39e-6ab2-4353-8648-eb3fc939be20. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 959.876038] env[69796]: DEBUG nova.network.neutron [req-d4baaa21-3600-438a-9efa-fc4acd29200d req-310fca26-1969-41a1-91e0-1c34f614c064 service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Updating instance_info_cache with network_info: [{"id": "84c5a39e-6ab2-4353-8648-eb3fc939be20", "address": "fa:16:3e:91:76:c5", "network": {"id": "cf63b262-bc3f-4dd1-bcad-ba359092ff79", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1600225792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a013be517fea4fe59a57059de0fbeff7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84c5a39e-6a", "ovs_interfaceid": "84c5a39e-6ab2-4353-8648-eb3fc939be20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.923031] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234423, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091286} completed successfully. 
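Editor's note: the instance_info_cache blobs above (e.g. for port 84c5a39e-6ab2-4353-8648-eb3fc939be20) are lists of VIF dicts in Nova's network model. A small sketch of pulling out the fields the log highlights (fixed IPs, MTU, device name); the network_info literal below is trimmed from the entry above:

```python
network_info = [{
    "id": "84c5a39e-6ab2-4353-8648-eb3fc939be20",
    "address": "fa:16:3e:91:76:c5",
    "devname": "tap84c5a39e-6a",
    "ovs_interfaceid": "84c5a39e-6ab2-4353-8648-eb3fc939be20",
    "network": {
        "bridge": "br-int",
        "meta": {"mtu": 8950},
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.10", "type": "fixed"}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["devname"], vif["network"]["meta"]["mtu"], fixed_ips)
```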
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.923031] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.923778] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a995093-3ded-4716-9e2e-9431c50d39b3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.949949] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7/e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.952882] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c0dd05a-8c29-4186-b113-0fa8fd2c6dce {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.973939] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Waiting for the task: (returnval){ [ 959.973939] env[69796]: value = "task-4234424" [ 959.973939] env[69796]: _type = "Task" [ 959.973939] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.986735] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234424, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.070981] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b83190-a313-4ae0-800f-63c82fb12e32 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.085172] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17529ca-2dd4-4fcd-8833-324f42ef0c6c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.118113] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6700b0-4a79-48d3-ae17-282cbe3c8a88 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.127469] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98129efe-8125-4eae-8657-a734092bf872 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.149678] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 960.158450] env[69796]: DEBUG nova.network.neutron [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 960.203641] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234420, 'name': CreateVM_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.250756] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234422, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.289806] env[69796]: DEBUG nova.network.neutron [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updating instance_info_cache with network_info: [{"id": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "address": "fa:16:3e:db:6e:d5", "network": {"id": "1a211050-7974-4d8f-943e-d178eecdb202", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1965542559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a47349b96df54bbb870c223414206b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa447dcfd-59", "ovs_interfaceid": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.379438] env[69796]: DEBUG oslo_concurrency.lockutils [req-d4baaa21-3600-438a-9efa-fc4acd29200d req-310fca26-1969-41a1-91e0-1c34f614c064 service nova] Releasing lock "refresh_cache-78da661c-9020-40d1-b2e7-bc844c0bdbb0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.484361] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234424, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.672436] env[69796]: ERROR nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [req-e4eed3cd-1598-4818-8430-596ae09e0bd1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e4eed3cd-1598-4818-8430-596ae09e0bd1"}]} [ 960.672782] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.923s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.673494] env[69796]: ERROR nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Error updating resources for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 960.673494] env[69796]: ERROR nova.compute.manager Traceback (most recent call last): [ 960.673494] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 960.673494] env[69796]: ERROR nova.compute.manager yield [ 960.673494] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 960.673494] env[69796]: ERROR nova.compute.manager self.set_inventory_for_provider( [ 960.673494] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 960.673494] env[69796]: ERROR nova.compute.manager raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 960.673494] env[69796]: ERROR nova.compute.manager nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e4eed3cd-1598-4818-8430-596ae09e0bd1"}]} [ 960.673494] env[69796]: ERROR nova.compute.manager [ 960.673494] env[69796]: ERROR nova.compute.manager During handling of the above exception, another exception occurred: [ 960.673494] env[69796]: ERROR nova.compute.manager [ 960.673938] env[69796]: ERROR nova.compute.manager Traceback (most recent call last): [ 960.673938] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 11220, in _update_available_resource_for_node [ 960.673938] env[69796]: ERROR nova.compute.manager self.rt.update_available_resource(context, nodename, [ 960.673938] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 965, in update_available_resource [ 
960.673938] env[69796]: ERROR nova.compute.manager self._update_available_resource(context, resources, startup=startup) [ 960.673938] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 960.673938] env[69796]: ERROR nova.compute.manager return f(*args, **kwargs) [ 960.673938] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1096, in _update_available_resource [ 960.673938] env[69796]: ERROR nova.compute.manager self._update(context, cn, startup=startup) [ 960.673938] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 960.673938] env[69796]: ERROR nova.compute.manager self._update_to_placement(context, compute_node, startup) [ 960.673938] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 960.673938] env[69796]: ERROR nova.compute.manager return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 960.673938] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 960.673938] env[69796]: ERROR nova.compute.manager return attempt.get(self._wrap_exception) [ 960.673938] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 960.673938] env[69796]: ERROR nova.compute.manager six.reraise(self.value[0], self.value[1], self.value[2]) [ 960.673938] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 960.674596] env[69796]: ERROR nova.compute.manager raise value [ 960.674596] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 960.674596] env[69796]: ERROR nova.compute.manager attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 960.674596] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 960.674596] env[69796]: ERROR nova.compute.manager self.reportclient.update_from_provider_tree( [ 960.674596] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 960.674596] env[69796]: ERROR nova.compute.manager with catch_all(pd.uuid): [ 960.674596] env[69796]: ERROR nova.compute.manager File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 960.674596] env[69796]: ERROR nova.compute.manager self.gen.throw(typ, value, traceback) [ 960.674596] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 960.674596] env[69796]: ERROR nova.compute.manager raise exception.ResourceProviderSyncFailed() [ 960.674596] env[69796]: ERROR nova.compute.manager nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 960.674596] env[69796]: ERROR nova.compute.manager [ 960.674596] env[69796]: DEBUG oslo_concurrency.lockutils [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.946s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.675489] env[69796]: INFO nova.compute.claims [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.709026] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234420, 'name': CreateVM_Task, 'duration_secs': 1.517} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.709458] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 960.711052] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.711052] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.711444] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 960.711828] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21d223d5-0fd4-4496-97da-4a1a0651390a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.719030] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 960.719030] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5226bd58-4a72-1991-5765-bca8069d17e2" [ 960.719030] env[69796]: _type = "Task" [ 960.719030] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.731534] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5226bd58-4a72-1991-5765-bca8069d17e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.751645] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234422, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.792891] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Releasing lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.793308] env[69796]: DEBUG nova.compute.manager [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Instance network_info: |[{"id": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "address": "fa:16:3e:db:6e:d5", "network": {"id": "1a211050-7974-4d8f-943e-d178eecdb202", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1965542559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a47349b96df54bbb870c223414206b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa447dcfd-59", "ovs_interfaceid": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 960.793766] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:6e:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e05affa-2640-435e-a124-0ee8a6ab1152', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a447dcfd-590e-4909-adfb-d7b2edf8b91d', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 960.801162] env[69796]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Creating folder: Project (a47349b96df54bbb870c223414206b63). Parent ref: group-v837766. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 960.801756] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-91272c93-be2d-4932-878e-79d4bb35a181 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.813559] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Created folder: Project (a47349b96df54bbb870c223414206b63) in parent group-v837766. [ 960.813789] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Creating folder: Instances. Parent ref: group-v837826. {{(pid=69796) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 960.813985] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-63531a76-82c3-453a-b254-13f71362ad97 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.824438] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Created folder: Instances in parent group-v837826. [ 960.824785] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 960.825078] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 960.825422] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-457d7c25-73ef-4dc9-bc66-f5a3c1662bbe {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.845166] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 960.845166] env[69796]: value = "task-4234427" [ 960.845166] env[69796]: _type = "Task" [ 960.845166] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.853265] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234427, 'name': CreateVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.984939] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234424, 'name': ReconfigVM_Task, 'duration_secs': 0.683075} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.985254] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Reconfigured VM instance instance-00000054 to attach disk [datastore2] e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7/e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.985933] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-66966b1a-42c3-4653-a236-6e87732e3a7a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.992901] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Waiting for the task: (returnval){ [ 960.992901] env[69796]: value = "task-4234428" [ 960.992901] env[69796]: _type = "Task" [ 960.992901] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.002379] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234428, 'name': Rename_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.004590] env[69796]: DEBUG nova.compute.manager [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Received event network-vif-plugged-a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 961.004794] env[69796]: DEBUG oslo_concurrency.lockutils [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] Acquiring lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.005047] env[69796]: DEBUG oslo_concurrency.lockutils [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] Lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.005237] env[69796]: DEBUG oslo_concurrency.lockutils [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] Lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.005403] env[69796]: DEBUG nova.compute.manager [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] No waiting events found dispatching 
network-vif-plugged-a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 961.005569] env[69796]: WARNING nova.compute.manager [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Received unexpected event network-vif-plugged-a447dcfd-590e-4909-adfb-d7b2edf8b91d for instance with vm_state building and task_state spawning. [ 961.005732] env[69796]: DEBUG nova.compute.manager [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Received event network-changed-a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 961.005881] env[69796]: DEBUG nova.compute.manager [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Refreshing instance network info cache due to event network-changed-a447dcfd-590e-4909-adfb-d7b2edf8b91d. {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 961.006079] env[69796]: DEBUG oslo_concurrency.lockutils [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] Acquiring lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.006221] env[69796]: DEBUG oslo_concurrency.lockutils [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] Acquired lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.006387] env[69796]: DEBUG nova.network.neutron [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Refreshing network info cache for port a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 961.231983] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]5226bd58-4a72-1991-5765-bca8069d17e2, 'name': SearchDatastore_Task, 'duration_secs': 0.011649} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.232384] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.232595] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 961.232786] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.232933] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.233135] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 961.233408] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d16399a7-2f23-4bf1-889b-6734fcf2b9d6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.248064] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 961.248269] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 961.249378] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5beb8b9c-9155-473d-86f4-b35babb07c46 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.254905] env[69796]: DEBUG oslo_vmware.api [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234422, 'name': PowerOnVM_Task, 'duration_secs': 1.517928} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.255576] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 961.255741] env[69796]: INFO nova.compute.manager [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Took 9.58 seconds to spawn the instance on the hypervisor. [ 961.255923] env[69796]: DEBUG nova.compute.manager [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 961.256771] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c76e0d0-db4e-4f29-8889-5e59be67d173 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.260784] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 961.260784] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52978055-28d2-9ea6-8204-fd263e08a4ab" [ 961.260784] env[69796]: _type = "Task" [ 961.260784] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.274343] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52978055-28d2-9ea6-8204-fd263e08a4ab, 'name': SearchDatastore_Task, 'duration_secs': 0.010745} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.275138] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0da2f6f-dd28-44fc-b40c-ae938f01cb07 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.280446] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 961.280446] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52cc3af8-7987-2653-87c2-d45ba0a12c74" [ 961.280446] env[69796]: _type = "Task" [ 961.280446] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.288832] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52cc3af8-7987-2653-87c2-d45ba0a12c74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.355762] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234427, 'name': CreateVM_Task, 'duration_secs': 0.292613} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.355939] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 961.356712] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.356876] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquired lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.357229] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 961.357493] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe0e7cfa-d1d4-4461-bfcc-092e1c7e1e8c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.363337] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 
tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 961.363337] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52260823-5d4f-d991-139a-da6d094402cd" [ 961.363337] env[69796]: _type = "Task" [ 961.363337] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.372878] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52260823-5d4f-d991-139a-da6d094402cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.503009] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234428, 'name': Rename_Task, 'duration_secs': 0.149361} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.503308] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 961.503549] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64a2ce1e-9939-4390-99b4-4d5f0ff63399 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.511903] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Waiting for the task: (returnval){ [ 961.511903] env[69796]: value = "task-4234429" [ 961.511903] env[69796]: _type = "Task" [ 961.511903] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.520127] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234429, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.697612] env[69796]: DEBUG nova.network.neutron [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updated VIF entry in instance network info cache for port a447dcfd-590e-4909-adfb-d7b2edf8b91d. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 961.697813] env[69796]: DEBUG nova.network.neutron [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updating instance_info_cache with network_info: [{"id": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "address": "fa:16:3e:db:6e:d5", "network": {"id": "1a211050-7974-4d8f-943e-d178eecdb202", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1965542559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a47349b96df54bbb870c223414206b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa447dcfd-59", "ovs_interfaceid": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.705940] env[69796]: DEBUG nova.scheduler.client.report [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 961.723614] env[69796]: DEBUG nova.scheduler.client.report [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 961.723765] env[69796]: DEBUG nova.compute.provider_tree [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 961.736476] env[69796]: DEBUG nova.scheduler.client.report 
[None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 961.757367] env[69796]: DEBUG nova.scheduler.client.report [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 961.777269] env[69796]: INFO nova.compute.manager [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Took 27.78 seconds to build instance. [ 961.795801] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52cc3af8-7987-2653-87c2-d45ba0a12c74, 'name': SearchDatastore_Task, 'duration_secs': 0.009957} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.796942] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.797260] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 78da661c-9020-40d1-b2e7-bc844c0bdbb0/78da661c-9020-40d1-b2e7-bc844c0bdbb0.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 961.800120] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53d7a541-8b8b-4680-ba92-982a0e76c141 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.808553] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 961.808553] env[69796]: value = "task-4234430" [ 961.808553] env[69796]: _type = "Task" [ 961.808553] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.818924] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234430, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.877190] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52260823-5d4f-d991-139a-da6d094402cd, 'name': SearchDatastore_Task, 'duration_secs': 0.010687} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.877618] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Releasing lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.877932] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 961.878278] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.878433] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquired lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.878615] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 961.878897] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1507b1b-9d9d-45d0-a180-cee7633c494a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.888847] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] 
Created directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 961.889075] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 961.889855] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a523e8f3-effe-46a6-b3a6-9ec6476cdddc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.899037] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 961.899037] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]527afb26-dfed-b544-ffb2-a403ab59a1bc" [ 961.899037] env[69796]: _type = "Task" [ 961.899037] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.910750] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]527afb26-dfed-b544-ffb2-a403ab59a1bc, 'name': SearchDatastore_Task, 'duration_secs': 0.010701} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.914429] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a26446f-f75b-4907-9a12-56ea1ab461a5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.921057] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 961.921057] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52473b96-9e6d-f96e-99bd-1f8397eb09bc" [ 961.921057] env[69796]: _type = "Task" [ 961.921057] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.931244] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52473b96-9e6d-f96e-99bd-1f8397eb09bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.021843] env[69796]: DEBUG oslo_vmware.api [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234429, 'name': PowerOnVM_Task, 'duration_secs': 0.453218} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.024767] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 962.025026] env[69796]: INFO nova.compute.manager [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Took 7.97 seconds to spawn the instance on the hypervisor. [ 962.025219] env[69796]: DEBUG nova.compute.manager [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 962.026265] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d04a75-8a35-4e42-ba30-19292ac2228d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.039215] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3cb29a6-fc51-4334-9487-368a24636124 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.048335] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c624a52-eee6-4fc2-b044-808d948038d1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.084359] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca724cf-2ef2-44b1-b656-46939fbf1b9f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.094487] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4155b5-21ef-49e9-9207-b0842b93456f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.111656] env[69796]: DEBUG nova.compute.provider_tree [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 962.201361] env[69796]: DEBUG oslo_concurrency.lockutils [req-9356cc28-67db-4924-8e77-bd149e97f0c7 req-ddba3043-d5f2-403f-83a2-da586afb6c0b service nova] Releasing lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 
962.280021] env[69796]: DEBUG oslo_concurrency.lockutils [None req-65c5a7fc-fd6d-4ddf-9dee-159a414e8928 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.754s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.319897] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234430, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489318} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.319963] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 78da661c-9020-40d1-b2e7-bc844c0bdbb0/78da661c-9020-40d1-b2e7-bc844c0bdbb0.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 962.320222] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 962.320455] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-31b55ecc-7c45-41cc-98e5-b54634f0af34 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.327235] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 962.327235] env[69796]: value = "task-4234431" [ 962.327235] env[69796]: _type = "Task" [ 962.327235] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.335863] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234431, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.432164] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52473b96-9e6d-f96e-99bd-1f8397eb09bc, 'name': SearchDatastore_Task, 'duration_secs': 0.011536} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.432451] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Releasing lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.432736] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] 983f57b3-3bfb-41ce-a924-d48c72d25c9f/983f57b3-3bfb-41ce-a924-d48c72d25c9f.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 962.433034] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-542c8be4-2048-40f2-bd13-2b43d87e246a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.440842] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 962.440842] env[69796]: value = "task-4234432" [ 962.440842] env[69796]: _type = "Task" [ 962.440842] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.449363] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234432, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.553926] env[69796]: INFO nova.compute.manager [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Took 26.43 seconds to build instance. [ 962.639490] env[69796]: ERROR nova.scheduler.client.report [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [req-8fc137ea-89e0-42a5-ad6d-671c24a87c0d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-8fc137ea-89e0-42a5-ad6d-671c24a87c0d"}]} [ 962.642019] env[69796]: DEBUG oslo_concurrency.lockutils [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.966s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.642019] env[69796]: ERROR nova.compute.manager [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 962.642019] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Traceback (most recent call last): [ 962.642019] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 962.642019] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] yield [ 962.642019] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 962.642019] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] self.set_inventory_for_provider( [ 962.642019] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 962.642019] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 962.642400] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-8fc137ea-89e0-42a5-ad6d-671c24a87c0d"}]} [ 962.642400] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] [ 962.642400] env[69796]: ERROR nova.compute.manager 
[instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] During handling of the above exception, another exception occurred: [ 962.642400] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] [ 962.642400] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Traceback (most recent call last): [ 962.642400] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 962.642400] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] with self.rt.instance_claim(context, instance, node, allocs, [ 962.642400] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 962.642400] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] return f(*args, **kwargs) [ 962.642741] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 962.642741] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] self._update(elevated, cn) [ 962.642741] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 962.642741] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] self._update_to_placement(context, compute_node, startup) [ 962.642741] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 962.642741] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 962.642741] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 962.642741] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] return attempt.get(self._wrap_exception) [ 962.642741] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 962.642741] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] six.reraise(self.value[0], self.value[1], self.value[2]) [ 962.642741] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 962.642741] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] raise value [ 962.642741] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 962.643204] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 962.643204] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 962.643204] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] self.reportclient.update_from_provider_tree( [ 962.643204] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 962.643204] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] with catch_all(pd.uuid): [ 962.643204] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 962.643204] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] self.gen.throw(typ, value, traceback) [ 962.643204] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 962.643204] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] raise exception.ResourceProviderSyncFailed() [ 962.643204] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 962.643204] env[69796]: ERROR nova.compute.manager [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] [ 962.643580] env[69796]: DEBUG nova.compute.utils [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 962.647021] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.847s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.647352] env[69796]: INFO nova.compute.claims [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 962.651157] env[69796]: DEBUG nova.compute.manager [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Build of instance 7f6be89d-a63c-43c8-901a-feea613b35cf was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 962.651511] env[69796]: DEBUG nova.compute.manager [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 962.651898] env[69796]: DEBUG oslo_concurrency.lockutils [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "refresh_cache-7f6be89d-a63c-43c8-901a-feea613b35cf" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.653456] env[69796]: DEBUG oslo_concurrency.lockutils [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquired lock "refresh_cache-7f6be89d-a63c-43c8-901a-feea613b35cf" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.653456] env[69796]: DEBUG nova.network.neutron [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 962.840141] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234431, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080487} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.840141] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 962.840141] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604c90fd-76ac-4256-9389-666b24a4f1ed {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.864093] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 78da661c-9020-40d1-b2e7-bc844c0bdbb0/78da661c-9020-40d1-b2e7-bc844c0bdbb0.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.864093] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41c6760f-7ee5-4854-9b44-be217f4a4fd2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.888024] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 962.888024] env[69796]: value = "task-4234433" [ 962.888024] env[69796]: _type = "Task" [ 962.888024] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.897395] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234433, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.951403] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234432, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493633} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.951674] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] 983f57b3-3bfb-41ce-a924-d48c72d25c9f/983f57b3-3bfb-41ce-a924-d48c72d25c9f.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 962.951890] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 962.952171] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48c51d4c-3ec1-4be0-9493-c97ee067f55b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.959335] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 962.959335] env[69796]: value = "task-4234434" [ 962.959335] env[69796]: _type = "Task" [ 962.959335] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.967855] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234434, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.059710] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4694b392-d571-4e7f-ae52-5fe5d1500d4d tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.661s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.115030] env[69796]: DEBUG nova.compute.manager [req-ff157afa-bc0a-46b7-9c23-d8fdcd84e932 req-0a45bc8e-f1ed-413b-b664-87cef305154f service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Received event network-changed-76652ff3-72d0-410c-abd6-d0e0e4bfcdc7 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 963.115185] env[69796]: DEBUG nova.compute.manager [req-ff157afa-bc0a-46b7-9c23-d8fdcd84e932 req-0a45bc8e-f1ed-413b-b664-87cef305154f service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Refreshing instance network info cache due to event network-changed-76652ff3-72d0-410c-abd6-d0e0e4bfcdc7. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 963.115389] env[69796]: DEBUG oslo_concurrency.lockutils [req-ff157afa-bc0a-46b7-9c23-d8fdcd84e932 req-0a45bc8e-f1ed-413b-b664-87cef305154f service nova] Acquiring lock "refresh_cache-e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.115547] env[69796]: DEBUG oslo_concurrency.lockutils [req-ff157afa-bc0a-46b7-9c23-d8fdcd84e932 req-0a45bc8e-f1ed-413b-b664-87cef305154f service nova] Acquired lock "refresh_cache-e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.115714] env[69796]: DEBUG nova.network.neutron [req-ff157afa-bc0a-46b7-9c23-d8fdcd84e932 req-0a45bc8e-f1ed-413b-b664-87cef305154f service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Refreshing network info cache for port 76652ff3-72d0-410c-abd6-d0e0e4bfcdc7 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 963.174494] env[69796]: DEBUG nova.network.neutron [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 963.255807] env[69796]: DEBUG nova.network.neutron [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.399399] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234433, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.470103] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234434, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067867} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.470623] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 963.471515] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc603328-b3f2-4066-a296-f2eedbdb5ed0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.495047] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 983f57b3-3bfb-41ce-a924-d48c72d25c9f/983f57b3-3bfb-41ce-a924-d48c72d25c9f.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 963.495307] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-063d9ef2-4622-473d-a8ad-97a7384c3cbd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.515905] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 963.515905] env[69796]: value = "task-4234435" [ 963.515905] env[69796]: _type = "Task" [ 963.515905] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.524156] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234435, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.607075] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.607286] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.680619] env[69796]: DEBUG nova.scheduler.client.report [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 963.697652] env[69796]: DEBUG nova.scheduler.client.report [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 963.697916] env[69796]: DEBUG nova.compute.provider_tree [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 963.711122] env[69796]: DEBUG nova.scheduler.client.report [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 963.731365] env[69796]: DEBUG nova.scheduler.client.report [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: 
HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 963.760454] env[69796]: DEBUG oslo_concurrency.lockutils [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Releasing lock "refresh_cache-7f6be89d-a63c-43c8-901a-feea613b35cf" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.760844] env[69796]: DEBUG nova.compute.manager [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 963.761136] env[69796]: DEBUG nova.compute.manager [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 963.761403] env[69796]: DEBUG nova.network.neutron [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 963.781636] env[69796]: DEBUG nova.network.neutron [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 963.863770] env[69796]: DEBUG nova.network.neutron [req-ff157afa-bc0a-46b7-9c23-d8fdcd84e932 req-0a45bc8e-f1ed-413b-b664-87cef305154f service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Updated VIF entry in instance network info cache for port 76652ff3-72d0-410c-abd6-d0e0e4bfcdc7. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 963.864287] env[69796]: DEBUG nova.network.neutron [req-ff157afa-bc0a-46b7-9c23-d8fdcd84e932 req-0a45bc8e-f1ed-413b-b664-87cef305154f service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Updating instance_info_cache with network_info: [{"id": "76652ff3-72d0-410c-abd6-d0e0e4bfcdc7", "address": "fa:16:3e:ca:81:43", "network": {"id": "e736eb9c-66d5-48db-ab5d-4525bafef1d8", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-973661727-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "952c728c1bee4cefb057c7b71efe1344", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b29df12-5674-476d-a9e5-5e20f704d224", "external-id": "nsx-vlan-transportzone-754", "segmentation_id": 754, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76652ff3-72", "ovs_interfaceid": "76652ff3-72d0-410c-abd6-d0e0e4bfcdc7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.903126] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234433, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.002641] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4087f4f-ac86-421f-bfb9-60c250e6ecf0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.012783] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4257b8f3-e539-4460-bd34-da0692cfc502 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.025068] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234435, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.049546] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a8db9e-e030-4d76-8bae-b90925cbdaf8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.057356] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46656b39-9f5d-4305-9ae8-dcca54f3acb0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.070927] env[69796]: DEBUG nova.compute.provider_tree [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 964.110958] env[69796]: DEBUG nova.compute.utils [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 964.284534] env[69796]: DEBUG nova.network.neutron [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.367631] env[69796]: DEBUG oslo_concurrency.lockutils [req-ff157afa-bc0a-46b7-9c23-d8fdcd84e932 req-0a45bc8e-f1ed-413b-b664-87cef305154f service nova] Releasing lock "refresh_cache-e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.399434] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234433, 'name': ReconfigVM_Task, 'duration_secs': 1.062863} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.399891] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 78da661c-9020-40d1-b2e7-bc844c0bdbb0/78da661c-9020-40d1-b2e7-bc844c0bdbb0.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 964.400342] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f1569cb-9fa1-4121-a9a1-a5eafcd45ae6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.407421] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 964.407421] env[69796]: value = "task-4234436" [ 964.407421] env[69796]: _type = "Task" [ 964.407421] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.417721] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234436, 'name': Rename_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.526640] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234435, 'name': ReconfigVM_Task, 'duration_secs': 0.646338} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.526947] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 983f57b3-3bfb-41ce-a924-d48c72d25c9f/983f57b3-3bfb-41ce-a924-d48c72d25c9f.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 964.527599] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-514922b5-6358-43dd-bc9d-066652836f4a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.534725] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 964.534725] env[69796]: value = "task-4234437" [ 964.534725] env[69796]: _type = "Task" [ 964.534725] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.543550] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234437, 'name': Rename_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.592426] env[69796]: ERROR nova.scheduler.client.report [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [req-9bb91150-1c2f-4731-ac2d-cc8a63c816bd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9bb91150-1c2f-4731-ac2d-cc8a63c816bd"}]} [ 964.592866] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.948s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.593588] env[69796]: ERROR nova.compute.manager [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 964.593588] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Traceback (most recent call last): [ 964.593588] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 964.593588] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] yield [ 964.593588] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 964.593588] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] self.set_inventory_for_provider( [ 964.593588] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 964.593588] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 964.593963] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9bb91150-1c2f-4731-ac2d-cc8a63c816bd"}]} [ 964.593963] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] [ 964.593963] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] During handling of the above exception, another exception occurred: [ 964.593963] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] [ 964.593963] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Traceback (most recent call last): [ 964.593963] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 964.593963] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] with self.rt.instance_claim(context, instance, node, allocs, [ 964.593963] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 964.593963] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] return f(*args, **kwargs) [ 964.594765] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 964.594765] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] self._update(elevated, cn) [ 964.594765] env[69796]: ERROR nova.compute.manager [instance: 
3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 964.594765] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] self._update_to_placement(context, compute_node, startup) [ 964.594765] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 964.594765] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 964.594765] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 964.594765] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] return attempt.get(self._wrap_exception) [ 964.594765] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 964.594765] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] six.reraise(self.value[0], self.value[1], self.value[2]) [ 964.594765] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 964.594765] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] raise value [ 964.594765] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 964.595552] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 964.595552] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 964.595552] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] self.reportclient.update_from_provider_tree( [ 964.595552] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 964.595552] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] with catch_all(pd.uuid): [ 964.595552] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 964.595552] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] self.gen.throw(typ, value, traceback) [ 964.595552] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 964.595552] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] raise exception.ResourceProviderSyncFailed() [ 964.595552] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 964.595552] env[69796]: ERROR nova.compute.manager [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] [ 964.595973] env[69796]: DEBUG nova.compute.utils [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 964.595973] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.578s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.596264] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.598586] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.602s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.600293] env[69796]: INFO nova.compute.claims [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 964.603481] env[69796]: DEBUG nova.compute.manager [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Build of instance 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 964.603920] env[69796]: DEBUG nova.compute.manager [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 964.604161] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquiring lock "refresh_cache-3c939d6e-78d4-4dc0-ac3a-6d5e3c075165" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.604332] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquired lock "refresh_cache-3c939d6e-78d4-4dc0-ac3a-6d5e3c075165" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.604506] env[69796]: DEBUG nova.network.neutron [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 964.614253] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.623584] env[69796]: INFO nova.scheduler.client.report [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Deleted allocations for instance d746d66b-32df-4a4d-97bd-82b4ad364461 [ 964.789999] env[69796]: INFO nova.compute.manager [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 7f6be89d-a63c-43c8-901a-feea613b35cf] Took 1.03 seconds to deallocate network for instance. [ 964.918906] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234436, 'name': Rename_Task, 'duration_secs': 0.23064} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.919305] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 964.919574] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a5f889d-196d-4bae-bb3d-9f2cccd12786 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.927208] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 964.927208] env[69796]: value = "task-4234438" [ 964.927208] env[69796]: _type = "Task" [ 964.927208] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.936367] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234438, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.045508] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234437, 'name': Rename_Task, 'duration_secs': 0.178926} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.045807] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 965.046109] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfad07b7-8548-4ec2-a354-4ed825c6a878 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.052208] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 965.052208] env[69796]: value = "task-4234439" [ 965.052208] env[69796]: _type = "Task" [ 965.052208] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.061136] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234439, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.132186] env[69796]: DEBUG nova.network.neutron [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 965.134829] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08c4d868-7e90-416e-b005-09ae9f9f5cb0 tempest-ServersWithSpecificFlavorTestJSON-738434098 tempest-ServersWithSpecificFlavorTestJSON-738434098-project-member] Lock "d746d66b-32df-4a4d-97bd-82b4ad364461" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.740s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.216373] env[69796]: DEBUG nova.network.neutron [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.437624] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234438, 'name': PowerOnVM_Task} progress is 81%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.565718] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234439, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.634405] env[69796]: DEBUG nova.scheduler.client.report [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 965.651508] env[69796]: DEBUG nova.scheduler.client.report [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 965.651508] env[69796]: DEBUG nova.compute.provider_tree [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 965.670769] env[69796]: DEBUG nova.scheduler.client.report [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 965.692332] env[69796]: DEBUG nova.scheduler.client.report [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 965.697459] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.697558] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock 
"d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.697827] env[69796]: INFO nova.compute.manager [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Attaching volume e346cd07-ce0d-4c36-b10c-6eed9b071b72 to /dev/sdb [ 965.719085] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Releasing lock "refresh_cache-3c939d6e-78d4-4dc0-ac3a-6d5e3c075165" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.719345] env[69796]: DEBUG nova.compute.manager [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 965.719530] env[69796]: DEBUG nova.compute.manager [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 965.719699] env[69796]: DEBUG nova.network.neutron [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 965.738809] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7778382-e6cb-4d90-9a01-239c52927a9d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.743724] env[69796]: DEBUG nova.network.neutron [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 965.753341] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714c15cf-914c-4994-8884-aa85308b2256 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.777143] env[69796]: DEBUG nova.virt.block_device [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Updating existing volume attachment record: c3e5ff1a-f5e7-46ac-a426-94030eb0813e {{(pid=69796) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 965.825013] env[69796]: INFO nova.scheduler.client.report [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Deleted allocations for instance 7f6be89d-a63c-43c8-901a-feea613b35cf [ 965.940702] env[69796]: DEBUG oslo_vmware.api [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234438, 'name': PowerOnVM_Task, 'duration_secs': 0.851791} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.940966] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 965.941183] env[69796]: INFO nova.compute.manager [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Took 9.54 seconds to spawn the instance on the hypervisor. 
[ 965.941819] env[69796]: DEBUG nova.compute.manager [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 965.942898] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af73b8de-554f-4ecd-ac95-a2e2c1d03686 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.948623] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d94da3-5158-4199-8726-1f847a2a6ad6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.959959] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde454e2-f9ce-4b79-8196-e6dbbf614a9e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.997533] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557acd60-4d41-44df-bf95-9ae330baad0d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.004242] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f16a87-8e08-4424-bf13-a2cd5fa45cd2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.023159] env[69796]: DEBUG nova.compute.provider_tree [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 966.065664] env[69796]: DEBUG oslo_vmware.api [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234439, 'name': PowerOnVM_Task, 'duration_secs': 0.603548} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.065944] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 966.066153] env[69796]: INFO nova.compute.manager [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Took 7.29 seconds to spawn the instance on the hypervisor. 
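The ProviderTree inventory above carries 'max_unit': 0 for DISK_GB, which is exactly what placement rejects in the 400 responses seen in this log: the quoted schema fragment requires max_unit to be an integer of at least 1. A minimal sketch, not part of the log and assuming only the schema fragment and inventory values quoted in the error detail, reproducing that validation failure with the jsonschema library:

```python
# Hypothetical reconstruction for illustration; not code from Nova or placement.
# It checks the DISK_GB inventory record shown in the log against the max_unit
# schema fragment quoted in placement's 400 response.
import jsonschema

# Schema fragment for max_unit, copied from the error detail in the log.
MAX_UNIT_SCHEMA = {"type": "integer", "maximum": 2147483647, "minimum": 1}

# DISK_GB inventory as reported by the compute node in this log.
disk_gb = {
    "total": 400,
    "reserved": 0,
    "min_unit": 1,
    "max_unit": 0,   # 0 violates the schema's minimum of 1
    "step_size": 1,
    "allocation_ratio": 1.0,
}

try:
    jsonschema.validate(disk_gb["max_unit"], MAX_UNIT_SCHEMA)
except jsonschema.ValidationError as exc:
    # Prints "0 is less than the minimum of 1", mirroring the message
    # embedded in the ResourceProviderUpdateFailed errors above.
    print(exc.message)
```

Under that assumption, any inventory update carrying DISK_GB max_unit of 0 would fail the same way, so the compute node's _update_to_placement call keeps raising ResourceProviderSyncFailed and the affected builds are re-scheduled, as the surrounding entries show.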
[ 966.066334] env[69796]: DEBUG nova.compute.manager [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 966.067403] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d410195-3ed1-4d0a-866c-19ee8df0d9ac {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.249823] env[69796]: DEBUG nova.network.neutron [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.337937] env[69796]: DEBUG oslo_concurrency.lockutils [None req-378f43c3-63cf-4379-a5b8-c2113140f337 tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "7f6be89d-a63c-43c8-901a-feea613b35cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.637s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.463281] env[69796]: INFO nova.compute.manager [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Took 28.27 seconds to build instance. [ 966.553017] env[69796]: ERROR nova.scheduler.client.report [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] [req-2726860f-f6ef-4801-8b20-80b49c5742f1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2726860f-f6ef-4801-8b20-80b49c5742f1"}]} [ 966.553017] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.953s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.553304] env[69796]: ERROR nova.compute.manager [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 966.553304] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] Traceback (most recent call last): [ 966.553304] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 966.553304] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] yield [ 966.553304] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 966.553304] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] self.set_inventory_for_provider( [ 966.553304] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 966.553304] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 966.553556] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2726860f-f6ef-4801-8b20-80b49c5742f1"}]} [ 966.553556] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] [ 966.553556] env[69796]: ERROR nova.compute.manager [instance: 
f9460639-e09c-4c4f-a0e1-a518730368bb] During handling of the above exception, another exception occurred: [ 966.553556] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] [ 966.553556] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] Traceback (most recent call last): [ 966.553556] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 966.553556] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] with self.rt.instance_claim(context, instance, node, allocs, [ 966.553556] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 966.553556] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] return f(*args, **kwargs) [ 966.553859] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 966.553859] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] self._update(elevated, cn) [ 966.553859] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 966.553859] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] self._update_to_placement(context, compute_node, startup) [ 966.553859] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 966.553859] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 966.553859] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 966.553859] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] return attempt.get(self._wrap_exception) [ 966.553859] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 966.553859] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] six.reraise(self.value[0], self.value[1], self.value[2]) [ 966.553859] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 966.553859] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] raise value [ 966.553859] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 966.554264] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 966.554264] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 966.554264] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] self.reportclient.update_from_provider_tree( [ 966.554264] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 966.554264] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] with catch_all(pd.uuid): [ 966.554264] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 966.554264] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] self.gen.throw(typ, value, traceback) [ 966.554264] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 966.554264] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] raise exception.ResourceProviderSyncFailed() [ 966.554264] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 966.554264] env[69796]: ERROR nova.compute.manager [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] [ 966.554813] env[69796]: DEBUG nova.compute.utils [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 966.556504] env[69796]: DEBUG nova.compute.manager [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] Build of instance f9460639-e09c-4c4f-a0e1-a518730368bb was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 966.556925] env[69796]: DEBUG nova.compute.manager [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 966.557173] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Acquiring lock "refresh_cache-f9460639-e09c-4c4f-a0e1-a518730368bb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.557324] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Acquired lock "refresh_cache-f9460639-e09c-4c4f-a0e1-a518730368bb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.557557] env[69796]: DEBUG nova.network.neutron [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 966.586029] env[69796]: INFO nova.compute.manager [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Took 27.45 seconds to build instance. [ 966.753232] env[69796]: INFO nova.compute.manager [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165] Took 1.03 seconds to deallocate network for instance. [ 966.965973] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c26795ec-f411-4ce7-a5ff-b1ac0942c275 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.536s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.089928] env[69796]: DEBUG oslo_concurrency.lockutils [None req-aab27543-c4b6-4a87-b425-6f9db65937b1 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.162s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.098283] env[69796]: DEBUG nova.network.neutron [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 967.180247] env[69796]: DEBUG nova.network.neutron [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.572437] env[69796]: DEBUG nova.compute.manager [req-92160263-175c-40fa-b800-cd45ecfcc9e8 req-edab9753-7cca-4d70-b752-7b71ca238ab0 service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Received event network-changed-84c5a39e-6ab2-4353-8648-eb3fc939be20 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 967.572745] env[69796]: DEBUG nova.compute.manager [req-92160263-175c-40fa-b800-cd45ecfcc9e8 req-edab9753-7cca-4d70-b752-7b71ca238ab0 service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Refreshing instance network info cache due to event network-changed-84c5a39e-6ab2-4353-8648-eb3fc939be20. {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 967.572883] env[69796]: DEBUG oslo_concurrency.lockutils [req-92160263-175c-40fa-b800-cd45ecfcc9e8 req-edab9753-7cca-4d70-b752-7b71ca238ab0 service nova] Acquiring lock "refresh_cache-78da661c-9020-40d1-b2e7-bc844c0bdbb0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.573760] env[69796]: DEBUG oslo_concurrency.lockutils [req-92160263-175c-40fa-b800-cd45ecfcc9e8 req-edab9753-7cca-4d70-b752-7b71ca238ab0 service nova] Acquired lock "refresh_cache-78da661c-9020-40d1-b2e7-bc844c0bdbb0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 967.574618] env[69796]: DEBUG nova.network.neutron [req-92160263-175c-40fa-b800-cd45ecfcc9e8 req-edab9753-7cca-4d70-b752-7b71ca238ab0 service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Refreshing network info cache for port 84c5a39e-6ab2-4353-8648-eb3fc939be20 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 967.641803] env[69796]: INFO nova.compute.manager [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Rescuing [ 967.642107] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquiring lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 967.642266] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquired lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 967.642436] env[69796]: DEBUG nova.network.neutron [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] 
Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 967.683597] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Releasing lock "refresh_cache-f9460639-e09c-4c4f-a0e1-a518730368bb" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.683930] env[69796]: DEBUG nova.compute.manager [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 967.684197] env[69796]: DEBUG nova.compute.manager [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] [instance: f9460639-e09c-4c4f-a0e1-a518730368bb] Skipping network deallocation for instance since networking was not requested. {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2292}} [ 967.799834] env[69796]: INFO nova.scheduler.client.report [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Deleted allocations for instance 3c939d6e-78d4-4dc0-ac3a-6d5e3c075165 [ 968.315249] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f9c34045-8e80-420d-bbfc-d8474efc3826 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "3c939d6e-78d4-4dc0-ac3a-6d5e3c075165" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.545s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.337509] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "8dc8c5ba-b800-4fbf-96fb-de1cdcae1091" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.338019] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "8dc8c5ba-b800-4fbf-96fb-de1cdcae1091" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.356984] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.358345] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69796) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.358836] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69796) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 968.361393] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.361393] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Cleaning up deleted instances {{(pid=69796) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11865}} [ 968.489117] env[69796]: DEBUG nova.network.neutron [req-92160263-175c-40fa-b800-cd45ecfcc9e8 req-edab9753-7cca-4d70-b752-7b71ca238ab0 service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Updated VIF entry in instance network info cache for port 84c5a39e-6ab2-4353-8648-eb3fc939be20. {{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 968.489497] env[69796]: DEBUG nova.network.neutron [req-92160263-175c-40fa-b800-cd45ecfcc9e8 req-edab9753-7cca-4d70-b752-7b71ca238ab0 service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Updating instance_info_cache with network_info: [{"id": "84c5a39e-6ab2-4353-8648-eb3fc939be20", "address": "fa:16:3e:91:76:c5", "network": {"id": "cf63b262-bc3f-4dd1-bcad-ba359092ff79", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1600225792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a013be517fea4fe59a57059de0fbeff7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84c5a39e-6a", "ovs_interfaceid": "84c5a39e-6ab2-4353-8648-eb3fc939be20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.495291] env[69796]: DEBUG nova.network.neutron [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updating instance_info_cache with network_info: [{"id": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "address": "fa:16:3e:db:6e:d5", "network": {"id": "1a211050-7974-4d8f-943e-d178eecdb202", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1965542559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a47349b96df54bbb870c223414206b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa447dcfd-59", "ovs_interfaceid": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.719553] env[69796]: INFO nova.scheduler.client.report [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Deleted allocations for instance f9460639-e09c-4c4f-a0e1-a518730368bb [ 968.840980] env[69796]: DEBUG nova.compute.manager [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 968.864070] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] There are 2 instances to clean {{(pid=69796) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11874}} [ 968.864520] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [instance: d746d66b-32df-4a4d-97bd-82b4ad364461] Instance has had 0 of 5 cleanup attempts {{(pid=69796) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 968.992957] env[69796]: DEBUG oslo_concurrency.lockutils [req-92160263-175c-40fa-b800-cd45ecfcc9e8 req-edab9753-7cca-4d70-b752-7b71ca238ab0 service nova] Releasing lock "refresh_cache-78da661c-9020-40d1-b2e7-bc844c0bdbb0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.998157] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Releasing lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.227947] env[69796]: DEBUG oslo_concurrency.lockutils [None req-2a05e17e-8bc9-473a-8736-9cee70fc1ad0 tempest-ServerShowV254Test-643309847 tempest-ServerShowV254Test-643309847-project-member] Lock "f9460639-e09c-4c4f-a0e1-a518730368bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.268s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.364880] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.365190] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.366729] env[69796]: INFO nova.compute.claims [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 969.369331] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [instance: d0e1a7df-f83f-43c2-a387-d2a378ff31b6] Instance has had 0 of 5 cleanup attempts {{(pid=69796) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 969.877298] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 969.877642] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Cleaning up deleted instances with incomplete migration {{(pid=69796) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11903}} [ 970.124989] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquiring lock "e511a8e9-e293-4263-8fcf-3d154669dee9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.125265] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "e511a8e9-e293-4263-8fcf-3d154669dee9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.329313] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Volume attach. 
Driver type: vmdk {{(pid=69796) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 970.329544] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837830', 'volume_id': 'e346cd07-ce0d-4c36-b10c-6eed9b071b72', 'name': 'volume-e346cd07-ce0d-4c36-b10c-6eed9b071b72', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd1c6fd2b-462e-4136-b3ba-a4d4d113e4d7', 'attached_at': '', 'detached_at': '', 'volume_id': 'e346cd07-ce0d-4c36-b10c-6eed9b071b72', 'serial': 'e346cd07-ce0d-4c36-b10c-6eed9b071b72'} {{(pid=69796) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 970.330614] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094a21f9-0ddb-4a83-b07c-b21821788ae0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.348158] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b8767f-32a7-48b6-8cf4-f10e33820179 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.373017] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Reconfiguring VM instance instance-00000053 to attach disk [localhost-esx-install-datastore] volume-e346cd07-ce0d-4c36-b10c-6eed9b071b72/volume-e346cd07-ce0d-4c36-b10c-6eed9b071b72.vmdk or device None with type thin {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 970.373337] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-243e7d5c-baa0-4de4-b307-f0d93a21418d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.389065] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 970.396594] env[69796]: DEBUG oslo_vmware.api [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 970.396594] env[69796]: value = "task-4234444" [ 970.396594] env[69796]: _type = "Task" [ 970.396594] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.406370] env[69796]: DEBUG oslo_vmware.api [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234444, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.407484] env[69796]: DEBUG nova.scheduler.client.report [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 970.421263] env[69796]: DEBUG nova.scheduler.client.report [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 970.421476] env[69796]: DEBUG nova.compute.provider_tree [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 970.435419] env[69796]: DEBUG nova.scheduler.client.report [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 970.455198] env[69796]: DEBUG nova.scheduler.client.report [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 970.535368] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 970.536007] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61ac259e-44e2-40d8-8f82-51bd56cd5d9f {{(pid=69796) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.544044] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 970.544044] env[69796]: value = "task-4234445" [ 970.544044] env[69796]: _type = "Task" [ 970.544044] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.552621] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234445, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.628936] env[69796]: DEBUG nova.compute.manager [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 970.677576] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b9574f-7348-480c-8a52-b262cd88d8cc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.686721] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3186d5dc-6b10-4d7f-84f3-ef9be4c5b674 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.723330] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd3592a-c3a5-4703-ac09-ac730d294f77 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.732427] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e70e0e8-4b41-406d-90b8-bb5ad7972155 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.748980] env[69796]: DEBUG nova.compute.provider_tree [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 970.907868] env[69796]: DEBUG oslo_vmware.api [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234444, 'name': ReconfigVM_Task, 'duration_secs': 0.423634} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.908191] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Reconfigured VM instance instance-00000053 to attach disk [localhost-esx-install-datastore] volume-e346cd07-ce0d-4c36-b10c-6eed9b071b72/volume-e346cd07-ce0d-4c36-b10c-6eed9b071b72.vmdk or device None with type thin {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 970.912876] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85c98fad-0c53-4b09-937c-3ce1e2c0c0c0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.929122] env[69796]: DEBUG oslo_vmware.api [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 970.929122] env[69796]: value = "task-4234446" [ 970.929122] env[69796]: _type = "Task" [ 970.929122] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.938143] env[69796]: DEBUG oslo_vmware.api [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234446, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.055766] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234445, 'name': PowerOffVM_Task, 'duration_secs': 0.284035} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.056292] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 971.057256] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c73f31-f393-4eb4-9a29-95d68d933642 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.078655] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da2ec5ff-f62d-4653-923a-0b75409638b2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.111494] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 971.111840] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35110303-20c5-4843-80cd-ac12999cb79c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.119578] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 971.119578] env[69796]: value = "task-4234447" [ 971.119578] env[69796]: _type = "Task" [ 971.119578] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.128391] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] VM already powered off {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 971.128602] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 971.128846] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.128996] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquired lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.129195] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 971.129435] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20fc4adf-c08f-48b0-ac6a-0f9f8d696d7a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.140253] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 971.140437] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 971.141166] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20713b73-af31-4725-891a-227cb68b1f4b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.147629] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 971.147629] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]521b080b-0642-d4fa-fed1-f2c92533479e" [ 971.147629] env[69796]: _type = "Task" [ 971.147629] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.156579] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]521b080b-0642-d4fa-fed1-f2c92533479e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.157535] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.270374] env[69796]: ERROR nova.scheduler.client.report [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [req-65f19975-d59a-4d46-be90-2461d4fc27d2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-65f19975-d59a-4d46-be90-2461d4fc27d2"}]} [ 971.270766] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.906s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.271407] env[69796]: ERROR nova.compute.manager [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 971.271407] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Traceback (most recent call last): [ 971.271407] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 971.271407] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] yield [ 971.271407] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 971.271407] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] self.set_inventory_for_provider( [ 971.271407] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 971.271407] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 971.271597] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-65f19975-d59a-4d46-be90-2461d4fc27d2"}]} [ 971.271597] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] [ 971.271597] env[69796]: ERROR nova.compute.manager 
[instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] During handling of the above exception, another exception occurred: [ 971.271597] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] [ 971.271597] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Traceback (most recent call last): [ 971.271597] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 971.271597] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] with self.rt.instance_claim(context, instance, node, allocs, [ 971.271597] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 971.271597] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] return f(*args, **kwargs) [ 971.271822] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 971.271822] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] self._update(elevated, cn) [ 971.271822] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 971.271822] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] self._update_to_placement(context, compute_node, startup) [ 971.271822] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 971.271822] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 971.271822] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 971.271822] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] return attempt.get(self._wrap_exception) [ 971.271822] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 971.271822] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] six.reraise(self.value[0], self.value[1], self.value[2]) [ 971.271822] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 971.271822] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] raise value [ 971.271822] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 971.272142] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 971.272142] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 
1390, in _update_to_placement [ 971.272142] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] self.reportclient.update_from_provider_tree( [ 971.272142] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 971.272142] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] with catch_all(pd.uuid): [ 971.272142] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 971.272142] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] self.gen.throw(typ, value, traceback) [ 971.272142] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 971.272142] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] raise exception.ResourceProviderSyncFailed() [ 971.272142] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 971.272142] env[69796]: ERROR nova.compute.manager [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] [ 971.272364] env[69796]: DEBUG nova.compute.utils [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 971.273404] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.116s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.274852] env[69796]: INFO nova.compute.claims [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 971.279593] env[69796]: DEBUG nova.compute.manager [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Build of instance 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 971.280028] env[69796]: DEBUG nova.compute.manager [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 971.280668] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquiring lock "refresh_cache-8dc8c5ba-b800-4fbf-96fb-de1cdcae1091" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.280668] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Acquired lock "refresh_cache-8dc8c5ba-b800-4fbf-96fb-de1cdcae1091" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.280668] env[69796]: DEBUG nova.network.neutron [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 971.439287] env[69796]: DEBUG oslo_vmware.api [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234446, 'name': ReconfigVM_Task, 'duration_secs': 0.17843} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.439287] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837830', 'volume_id': 'e346cd07-ce0d-4c36-b10c-6eed9b071b72', 'name': 'volume-e346cd07-ce0d-4c36-b10c-6eed9b071b72', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd1c6fd2b-462e-4136-b3ba-a4d4d113e4d7', 'attached_at': '', 'detached_at': '', 'volume_id': 'e346cd07-ce0d-4c36-b10c-6eed9b071b72', 'serial': 'e346cd07-ce0d-4c36-b10c-6eed9b071b72'} {{(pid=69796) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 971.660066] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]521b080b-0642-d4fa-fed1-f2c92533479e, 'name': SearchDatastore_Task, 'duration_secs': 0.010783} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.660876] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e756bba-25f7-4300-b044-377a22e10eef {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.666739] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 971.666739] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52c21e41-bb99-70e5-f619-206547685c5a" [ 971.666739] env[69796]: _type = "Task" [ 971.666739] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.675210] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52c21e41-bb99-70e5-f619-206547685c5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.804651] env[69796]: DEBUG nova.network.neutron [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 971.886215] env[69796]: DEBUG nova.network.neutron [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.177535] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52c21e41-bb99-70e5-f619-206547685c5a, 'name': SearchDatastore_Task, 'duration_secs': 0.009953} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.177870] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Releasing lock "[datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.178078] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] 983f57b3-3bfb-41ce-a924-d48c72d25c9f/11e211db-44f8-4e34-8fec-8b87ab3fce6f-rescue.vmdk. {{(pid=69796) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 972.178351] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02ffd06d-d532-4779-94b1-79fd6e07d986 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.186460] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 972.186460] env[69796]: value = "task-4234448" [ 972.186460] env[69796]: _type = "Task" [ 972.186460] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.194849] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234448, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.308969] env[69796]: DEBUG nova.scheduler.client.report [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 972.323567] env[69796]: DEBUG nova.scheduler.client.report [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 972.323793] env[69796]: DEBUG nova.compute.provider_tree [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 972.336212] env[69796]: DEBUG nova.scheduler.client.report [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 972.356927] env[69796]: DEBUG nova.scheduler.client.report [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 972.389989] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Releasing lock "refresh_cache-8dc8c5ba-b800-4fbf-96fb-de1cdcae1091" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.389989] env[69796]: DEBUG nova.compute.manager [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 972.390233] env[69796]: DEBUG nova.compute.manager [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 972.390364] env[69796]: DEBUG nova.network.neutron [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 972.413570] env[69796]: DEBUG nova.network.neutron [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 972.477252] env[69796]: DEBUG nova.objects.instance [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lazy-loading 'flavor' on Instance uuid d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.602355] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a4f88e-5632-462b-99bd-b0b260aef560 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.611937] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323a30da-2c63-4b6a-85d0-844159fc7a4f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.645917] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bd46b6-8f1a-46a5-8e22-b70e770f6bdf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.654452] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b537c5b6-6b0f-4f40-89ce-08b52b0cab78 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.668772] env[69796]: DEBUG nova.compute.provider_tree [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 972.697965] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 
tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234448, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480914} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.698285] env[69796]: INFO nova.virt.vmwareapi.ds_util [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore1] 983f57b3-3bfb-41ce-a924-d48c72d25c9f/11e211db-44f8-4e34-8fec-8b87ab3fce6f-rescue.vmdk. [ 972.699129] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a3bd7e-3846-4448-9ad2-b0a4233dd2b6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.726643] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 983f57b3-3bfb-41ce-a924-d48c72d25c9f/11e211db-44f8-4e34-8fec-8b87ab3fce6f-rescue.vmdk or device None with type thin {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 972.726948] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a357379f-fb55-44fd-8ab0-607969e43d79 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.746286] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 972.746286] env[69796]: value = "task-4234449" [ 972.746286] env[69796]: _type = "Task" [ 972.746286] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.755666] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234449, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.882781] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.883077] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.883268] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.883427] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.883583] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager.update_available_resource {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.916129] env[69796]: DEBUG nova.network.neutron [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.985253] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7998c1ad-1d37-40d5-8f06-648c32912e55 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.288s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.177091] env[69796]: DEBUG oslo_concurrency.lockutils [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.178026] env[69796]: DEBUG oslo_concurrency.lockutils [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.178026] env[69796]: DEBUG oslo_concurrency.lockutils [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring 
lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.178332] env[69796]: DEBUG oslo_concurrency.lockutils [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.178362] env[69796]: DEBUG oslo_concurrency.lockutils [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.180409] env[69796]: INFO nova.compute.manager [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Terminating instance [ 973.199678] env[69796]: ERROR nova.scheduler.client.report [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [req-1cb2b139-cfe8-41aa-9477-f3b4049193a0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1cb2b139-cfe8-41aa-9477-f3b4049193a0"}]} [ 973.200020] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.927s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.200611] env[69796]: ERROR nova.compute.manager [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 973.200611] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Traceback (most recent call last): [ 973.200611] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 973.200611] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] yield [ 973.200611] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 973.200611] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] self.set_inventory_for_provider( [ 973.200611] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 973.200611] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 973.200884] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1cb2b139-cfe8-41aa-9477-f3b4049193a0"}]} [ 973.200884] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] [ 973.200884] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] During handling of the above exception, another exception occurred: [ 973.200884] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] [ 973.200884] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Traceback (most recent call last): [ 973.200884] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 973.200884] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] with self.rt.instance_claim(context, instance, node, allocs, [ 973.200884] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 973.200884] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] return f(*args, **kwargs) [ 973.201261] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 973.201261] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] self._update(elevated, cn) [ 973.201261] env[69796]: ERROR nova.compute.manager [instance: 
e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 973.201261] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] self._update_to_placement(context, compute_node, startup) [ 973.201261] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 973.201261] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 973.201261] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 973.201261] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] return attempt.get(self._wrap_exception) [ 973.201261] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 973.201261] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] six.reraise(self.value[0], self.value[1], self.value[2]) [ 973.201261] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 973.201261] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] raise value [ 973.201261] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 973.201732] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 973.201732] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 973.201732] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] self.reportclient.update_from_provider_tree( [ 973.201732] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 973.201732] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] with catch_all(pd.uuid): [ 973.201732] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 973.201732] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] self.gen.throw(typ, value, traceback) [ 973.201732] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 973.201732] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] raise exception.ResourceProviderSyncFailed() [ 973.201732] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 973.201732] env[69796]: ERROR nova.compute.manager [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] [ 973.202094] env[69796]: DEBUG nova.compute.utils [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 973.202932] env[69796]: DEBUG nova.compute.manager [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Build of instance e511a8e9-e293-4263-8fcf-3d154669dee9 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 973.203370] env[69796]: DEBUG nova.compute.manager [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 973.203649] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquiring lock "refresh_cache-e511a8e9-e293-4263-8fcf-3d154669dee9" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.203747] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Acquired lock "refresh_cache-e511a8e9-e293-4263-8fcf-3d154669dee9" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.203904] env[69796]: DEBUG nova.network.neutron [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 973.256539] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234449, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.386333] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.386652] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.386845] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.387010] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69796) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 973.387949] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85380c2c-eda1-4607-8398-d600872dd223 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.397017] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e372ad-18ef-4690-a811-bcba5e02627b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.411620] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930dec54-dc97-47c5-892e-fd4e134f0a8b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.418693] env[69796]: INFO nova.compute.manager [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] [instance: 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091] Took 1.03 seconds to deallocate network for instance. 
[ 973.422143] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369f60fb-4a5c-44a8-80e8-46ad2d753dcb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.455317] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180560MB free_disk=0GB free_vcpus=48 pci_devices=None {{(pid=69796) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 973.455497] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.455739] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.685257] env[69796]: DEBUG nova.compute.manager [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Start destroying the instance on the hypervisor. {{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 973.685528] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.685832] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6b85c82-9b5c-4f13-b34d-c203b2446e0f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.694023] env[69796]: DEBUG oslo_vmware.api [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 973.694023] env[69796]: value = "task-4234450" [ 973.694023] env[69796]: _type = "Task" [ 973.694023] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.704278] env[69796]: DEBUG oslo_vmware.api [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234450, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.724120] env[69796]: DEBUG nova.network.neutron [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 973.757419] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234449, 'name': ReconfigVM_Task, 'duration_secs': 0.784727} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.757758] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 983f57b3-3bfb-41ce-a924-d48c72d25c9f/11e211db-44f8-4e34-8fec-8b87ab3fce6f-rescue.vmdk or device None with type thin {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 973.758649] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e2030a-1db2-4376-9978-fc1a7574a73b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.788065] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92090eb7-e563-4240-ac54-77a00bd3ae3c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.807944] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 973.807944] env[69796]: value = "task-4234451" [ 973.807944] env[69796]: _type = "Task" [ 973.807944] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.817687] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234451, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.836228] env[69796]: DEBUG nova.network.neutron [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.204580] env[69796]: DEBUG oslo_vmware.api [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234450, 'name': PowerOffVM_Task, 'duration_secs': 0.238199} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.204948] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 974.205087] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Volume detach. Driver type: vmdk {{(pid=69796) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 974.205288] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837830', 'volume_id': 'e346cd07-ce0d-4c36-b10c-6eed9b071b72', 'name': 'volume-e346cd07-ce0d-4c36-b10c-6eed9b071b72', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd1c6fd2b-462e-4136-b3ba-a4d4d113e4d7', 'attached_at': '', 'detached_at': '', 'volume_id': 'e346cd07-ce0d-4c36-b10c-6eed9b071b72', 'serial': 'e346cd07-ce0d-4c36-b10c-6eed9b071b72'} {{(pid=69796) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 974.206134] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848d9e64-9a29-4c1e-bc43-bf972e459bcf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.231252] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3b806d-d0bc-4b94-9827-1f58e77b6cdf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.236502] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53376031-0cd0-405d-ada1-e27197c70601 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.260116] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114d9fdd-4dca-46e8-9bf3-eb8e74e0f1f5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.276095] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] The volume has not been displaced from its original location: [localhost-esx-install-datastore] volume-e346cd07-ce0d-4c36-b10c-6eed9b071b72/volume-e346cd07-ce0d-4c36-b10c-6eed9b071b72.vmdk. No consolidation needed. 
{{(pid=69796) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 974.282539] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Reconfiguring VM instance instance-00000053 to detach disk 2001 {{(pid=69796) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 974.282938] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1f47780-d988-4f34-b1e4-e2ddf2d5617a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.302513] env[69796]: DEBUG oslo_vmware.api [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 974.302513] env[69796]: value = "task-4234452" [ 974.302513] env[69796]: _type = "Task" [ 974.302513] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.313614] env[69796]: DEBUG oslo_vmware.api [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234452, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.322052] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234451, 'name': ReconfigVM_Task, 'duration_secs': 0.174826} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.323938] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 974.323938] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-636482f6-eeea-4e35-95eb-844676bc0f94 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.330134] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 974.330134] env[69796]: value = "task-4234453" [ 974.330134] env[69796]: _type = "Task" [ 974.330134] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.339655] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Releasing lock "refresh_cache-e511a8e9-e293-4263-8fcf-3d154669dee9" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 974.340066] env[69796]: DEBUG nova.compute.manager [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 974.340366] env[69796]: DEBUG nova.compute.manager [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 974.340706] env[69796]: DEBUG nova.network.neutron [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 974.342448] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234453, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.360489] env[69796]: DEBUG nova.network.neutron [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 974.455052] env[69796]: INFO nova.scheduler.client.report [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Deleted allocations for instance 8dc8c5ba-b800-4fbf-96fb-de1cdcae1091 [ 974.488851] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47f223c0-12b0-4eda-ab42-81fe8b95afac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.489387] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 38792225-b054-4c08-b3ec-51d46287b0f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.489387] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47005af8-11fe-498f-9b67-e0316faeeb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.489387] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 836605ee-50cb-48b0-ba2e-33db3832f8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.489387] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance a4a16667-cd00-4850-9389-0bd57c7efd74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.489894] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 7f37f6c9-adba-4292-9d47-c455f77e539f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.489894] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 8b103adc-9903-406f-8fd1-e193e00cde11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.489894] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.489894] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 3020e505-513b-4b29-996a-6e70a212f508 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.490236] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance f0d4f167-344a-4828-9f6e-8a62ed8e064d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.490236] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.490236] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.490378] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 78da661c-9020-40d1-b2e7-bc844c0bdbb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.490423] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 983f57b3-3bfb-41ce-a924-d48c72d25c9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.491478] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance e511a8e9-e293-4263-8fcf-3d154669dee9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 974.491478] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 974.491478] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3584MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 974.508059] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 974.525584] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 974.525820] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 974.537916] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 974.557517] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 974.755695] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be911600-b280-40f3-9445-ad1c07aeadb7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.764252] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3d868368-61f8-4eb3-bf56-5252718b94ad {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.796847] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c774b552-7726-480e-b9fa-c4a905c7f6b5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.808553] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a105ef7-9804-40db-956a-fee0c8978aba {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.818965] env[69796]: DEBUG oslo_vmware.api [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234452, 'name': ReconfigVM_Task, 'duration_secs': 0.24527} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.826764] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Reconfigured VM instance instance-00000053 to detach disk 2001 {{(pid=69796) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 974.832270] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 974.833565] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a41fb52-e931-476e-b3b0-5d35c819e3bb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.856104] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234453, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.858050] env[69796]: DEBUG oslo_vmware.api [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 974.858050] env[69796]: value = "task-4234454" [ 974.858050] env[69796]: _type = "Task" [ 974.858050] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.862771] env[69796]: DEBUG nova.network.neutron [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.869813] env[69796]: DEBUG oslo_vmware.api [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234454, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.965854] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d397518c-05f8-4f29-b9ac-fcd4ffce46fe tempest-ServerDiskConfigTestJSON-1684329210 tempest-ServerDiskConfigTestJSON-1684329210-project-member] Lock "8dc8c5ba-b800-4fbf-96fb-de1cdcae1091" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 6.628s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.343478] env[69796]: DEBUG oslo_vmware.api [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234453, 'name': PowerOnVM_Task, 'duration_secs': 0.530397} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.343823] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 975.349460] env[69796]: DEBUG nova.compute.manager [None req-4d5638c9-3ca9-4bc8-b131-ea25d5f56ad0 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 975.353096] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6498dd-7f54-4431-9201-d6e388bb61fb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.365272] env[69796]: INFO nova.compute.manager [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] [instance: e511a8e9-e293-4263-8fcf-3d154669dee9] Took 1.02 seconds to deallocate network for instance. 
[ 975.368593] env[69796]: ERROR nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [req-e0f41acf-defa-42c8-bcb6-6a81e33b30f0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e0f41acf-defa-42c8-bcb6-6a81e33b30f0"}]} [ 975.368903] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.913s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.369417] env[69796]: ERROR nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Error updating resources for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 975.369417] env[69796]: ERROR nova.compute.manager Traceback (most recent call last): [ 975.369417] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 975.369417] env[69796]: ERROR nova.compute.manager yield [ 975.369417] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 975.369417] env[69796]: ERROR nova.compute.manager self.set_inventory_for_provider( [ 975.369417] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 975.369417] env[69796]: ERROR nova.compute.manager raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 975.369417] env[69796]: ERROR nova.compute.manager nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-e0f41acf-defa-42c8-bcb6-6a81e33b30f0"}]} [ 975.369417] env[69796]: ERROR nova.compute.manager [ 975.369417] env[69796]: ERROR nova.compute.manager During handling of the above exception, another exception occurred: [ 975.369417] env[69796]: ERROR nova.compute.manager [ 975.369743] env[69796]: ERROR nova.compute.manager Traceback (most recent call last): [ 975.369743] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 11220, in _update_available_resource_for_node [ 975.369743] env[69796]: ERROR nova.compute.manager self.rt.update_available_resource(context, nodename, [ 975.369743] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 965, in update_available_resource [ 975.369743] env[69796]: ERROR nova.compute.manager self._update_available_resource(context, resources, startup=startup) [ 975.369743] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 975.369743] env[69796]: ERROR nova.compute.manager return f(*args, **kwargs) [ 975.369743] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1096, in _update_available_resource [ 975.369743] env[69796]: ERROR nova.compute.manager self._update(context, cn, startup=startup) [ 975.369743] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 975.369743] env[69796]: ERROR nova.compute.manager self._update_to_placement(context, compute_node, startup) [ 975.369743] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 975.369743] env[69796]: ERROR nova.compute.manager return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 975.369743] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 975.369743] env[69796]: 
ERROR nova.compute.manager return attempt.get(self._wrap_exception) [ 975.369743] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 975.369743] env[69796]: ERROR nova.compute.manager six.reraise(self.value[0], self.value[1], self.value[2]) [ 975.369743] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 975.370310] env[69796]: ERROR nova.compute.manager raise value [ 975.370310] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 975.370310] env[69796]: ERROR nova.compute.manager attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 975.370310] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 975.370310] env[69796]: ERROR nova.compute.manager self.reportclient.update_from_provider_tree( [ 975.370310] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 975.370310] env[69796]: ERROR nova.compute.manager with catch_all(pd.uuid): [ 975.370310] env[69796]: ERROR nova.compute.manager File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 975.370310] env[69796]: ERROR nova.compute.manager self.gen.throw(typ, value, traceback) [ 975.370310] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 975.370310] env[69796]: ERROR nova.compute.manager raise exception.ResourceProviderSyncFailed() [ 975.370310] env[69796]: ERROR nova.compute.manager nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 975.370310] env[69796]: ERROR nova.compute.manager [ 975.372908] env[69796]: DEBUG oslo_vmware.api [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234454, 'name': ReconfigVM_Task, 'duration_secs': 0.165456} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.373430] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837830', 'volume_id': 'e346cd07-ce0d-4c36-b10c-6eed9b071b72', 'name': 'volume-e346cd07-ce0d-4c36-b10c-6eed9b071b72', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd1c6fd2b-462e-4136-b3ba-a4d4d113e4d7', 'attached_at': '', 'detached_at': '', 'volume_id': 'e346cd07-ce0d-4c36-b10c-6eed9b071b72', 'serial': 'e346cd07-ce0d-4c36-b10c-6eed9b071b72'} {{(pid=69796) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 975.373714] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 975.374523] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4b3ada-4724-4c22-b5f0-d33ded67991f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.383124] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 975.383400] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d72e1da-dd41-4db7-a6af-eb4dbdee0e10 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.452671] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 975.453176] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 975.453583] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Deleting the datastore file [datastore2] d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 975.453901] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0986c0dd-a4be-4128-b1e0-88d7aa658a25 {{(pid=69796) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.463037] env[69796]: DEBUG oslo_vmware.api [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 975.463037] env[69796]: value = "task-4234456" [ 975.463037] env[69796]: _type = "Task" [ 975.463037] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.474016] env[69796]: DEBUG oslo_vmware.api [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.842609] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 975.973159] env[69796]: DEBUG oslo_vmware.api [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146825} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.973639] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 975.973639] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 975.973824] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 975.973991] env[69796]: INFO nova.compute.manager [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Took 2.29 seconds to destroy the instance on the hypervisor. [ 975.974257] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 975.974456] env[69796]: DEBUG nova.compute.manager [-] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 975.974555] env[69796]: DEBUG nova.network.neutron [-] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 976.336175] env[69796]: DEBUG nova.compute.manager [req-90b981d4-b614-4a16-89a8-5ee7d5b36588 req-f51d190a-2915-4dc0-a544-935eb4ddfa9b service nova] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Received event network-vif-deleted-640a621e-38ea-40b2-b71c-15fe3f0c1c42 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 976.336175] env[69796]: INFO nova.compute.manager [req-90b981d4-b614-4a16-89a8-5ee7d5b36588 req-f51d190a-2915-4dc0-a544-935eb4ddfa9b service nova] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Neutron deleted interface 640a621e-38ea-40b2-b71c-15fe3f0c1c42; detaching it from the instance and deleting it from the info cache [ 976.336175] env[69796]: DEBUG nova.network.neutron [req-90b981d4-b614-4a16-89a8-5ee7d5b36588 req-f51d190a-2915-4dc0-a544-935eb4ddfa9b service nova] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.419883] env[69796]: INFO nova.scheduler.client.report [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Deleted allocations for instance e511a8e9-e293-4263-8fcf-3d154669dee9 [ 976.590935] env[69796]: DEBUG nova.compute.manager [req-5ec6a38c-4bf7-464e-a585-9f550a4722dd req-b44a7e04-d3b9-4f48-8930-587e28252200 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Received event network-changed-a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 976.591248] env[69796]: DEBUG nova.compute.manager [req-5ec6a38c-4bf7-464e-a585-9f550a4722dd req-b44a7e04-d3b9-4f48-8930-587e28252200 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Refreshing instance network info cache due to event network-changed-a447dcfd-590e-4909-adfb-d7b2edf8b91d. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 976.591484] env[69796]: DEBUG oslo_concurrency.lockutils [req-5ec6a38c-4bf7-464e-a585-9f550a4722dd req-b44a7e04-d3b9-4f48-8930-587e28252200 service nova] Acquiring lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.591631] env[69796]: DEBUG oslo_concurrency.lockutils [req-5ec6a38c-4bf7-464e-a585-9f550a4722dd req-b44a7e04-d3b9-4f48-8930-587e28252200 service nova] Acquired lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.591795] env[69796]: DEBUG nova.network.neutron [req-5ec6a38c-4bf7-464e-a585-9f550a4722dd req-b44a7e04-d3b9-4f48-8930-587e28252200 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Refreshing network info cache for port a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 976.752790] env[69796]: DEBUG nova.network.neutron [-] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.839317] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d3dc96c-fe11-4dc4-b839-53d86060b76a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.851180] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2941be-1265-4479-94ae-3ba19b2fbb95 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.884722] env[69796]: DEBUG nova.compute.manager [req-90b981d4-b614-4a16-89a8-5ee7d5b36588 req-f51d190a-2915-4dc0-a544-935eb4ddfa9b service nova] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Detach interface failed, port_id=640a621e-38ea-40b2-b71c-15fe3f0c1c42, reason: Instance d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 could not be found. {{(pid=69796) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 976.931443] env[69796]: DEBUG oslo_concurrency.lockutils [None req-e1cb0047-ed92-4edb-8644-7232c502b9c6 tempest-ImagesTestJSON-1840668238 tempest-ImagesTestJSON-1840668238-project-member] Lock "e511a8e9-e293-4263-8fcf-3d154669dee9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 6.806s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.256275] env[69796]: INFO nova.compute.manager [-] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Took 1.28 seconds to deallocate network for instance. [ 977.579342] env[69796]: DEBUG nova.network.neutron [req-5ec6a38c-4bf7-464e-a585-9f550a4722dd req-b44a7e04-d3b9-4f48-8930-587e28252200 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updated VIF entry in instance network info cache for port a447dcfd-590e-4909-adfb-d7b2edf8b91d. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 977.579790] env[69796]: DEBUG nova.network.neutron [req-5ec6a38c-4bf7-464e-a585-9f550a4722dd req-b44a7e04-d3b9-4f48-8930-587e28252200 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updating instance_info_cache with network_info: [{"id": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "address": "fa:16:3e:db:6e:d5", "network": {"id": "1a211050-7974-4d8f-943e-d178eecdb202", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1965542559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a47349b96df54bbb870c223414206b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa447dcfd-59", "ovs_interfaceid": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.830987] env[69796]: INFO nova.compute.manager [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Took 0.57 seconds to detach 1 volumes for instance. 
[ 977.924575] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Acquiring lock "40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.924971] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Lock "40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.082659] env[69796]: DEBUG oslo_concurrency.lockutils [req-5ec6a38c-4bf7-464e-a585-9f550a4722dd req-b44a7e04-d3b9-4f48-8930-587e28252200 service nova] Releasing lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.337931] env[69796]: DEBUG oslo_concurrency.lockutils [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.338261] env[69796]: DEBUG oslo_concurrency.lockutils [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.338492] env[69796]: DEBUG nova.objects.instance [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lazy-loading 'resources' on Instance uuid d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 978.428067] env[69796]: DEBUG nova.compute.manager [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 978.695883] env[69796]: DEBUG nova.compute.manager [req-c444b1fb-676e-4286-b098-951b93465399 req-08f95a3e-28b6-45c0-b06f-88b4cde00eb5 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Received event network-changed-a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 978.696547] env[69796]: DEBUG nova.compute.manager [req-c444b1fb-676e-4286-b098-951b93465399 req-08f95a3e-28b6-45c0-b06f-88b4cde00eb5 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Refreshing instance network info cache due to event network-changed-a447dcfd-590e-4909-adfb-d7b2edf8b91d. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 978.697514] env[69796]: DEBUG oslo_concurrency.lockutils [req-c444b1fb-676e-4286-b098-951b93465399 req-08f95a3e-28b6-45c0-b06f-88b4cde00eb5 service nova] Acquiring lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.698316] env[69796]: DEBUG oslo_concurrency.lockutils [req-c444b1fb-676e-4286-b098-951b93465399 req-08f95a3e-28b6-45c0-b06f-88b4cde00eb5 service nova] Acquired lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.698316] env[69796]: DEBUG nova.network.neutron [req-c444b1fb-676e-4286-b098-951b93465399 req-08f95a3e-28b6-45c0-b06f-88b4cde00eb5 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Refreshing network info cache for port a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 978.860395] env[69796]: DEBUG nova.scheduler.client.report [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 978.876873] env[69796]: DEBUG nova.scheduler.client.report [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 978.877299] env[69796]: DEBUG nova.compute.provider_tree [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 978.892331] env[69796]: DEBUG nova.scheduler.client.report [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 978.912058] env[69796]: DEBUG nova.scheduler.client.report [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 
tempest-DeleteServersTestJSON-938478453-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 978.948241] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.272172] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be88f6cd-6a6a-4890-88c5-7dfe0fc9be64 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.280044] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0d4720-54e3-40fc-88d6-94b015a4a2bd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.311935] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9476c946-74fd-4dd4-883c-1738d9b42ea7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.322365] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db927bc-8058-44a8-8d78-01c5ad5f5d46 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.340572] env[69796]: DEBUG nova.compute.provider_tree [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 979.518780] env[69796]: DEBUG nova.network.neutron [req-c444b1fb-676e-4286-b098-951b93465399 req-08f95a3e-28b6-45c0-b06f-88b4cde00eb5 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updated VIF entry in instance network info cache for port a447dcfd-590e-4909-adfb-d7b2edf8b91d. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 979.519421] env[69796]: DEBUG nova.network.neutron [req-c444b1fb-676e-4286-b098-951b93465399 req-08f95a3e-28b6-45c0-b06f-88b4cde00eb5 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updating instance_info_cache with network_info: [{"id": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "address": "fa:16:3e:db:6e:d5", "network": {"id": "1a211050-7974-4d8f-943e-d178eecdb202", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1965542559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.152", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a47349b96df54bbb870c223414206b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa447dcfd-59", "ovs_interfaceid": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.866023] env[69796]: ERROR nova.scheduler.client.report [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [req-28dc3ae5-dbcc-4039-803a-f8ca0f3eae90] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-28dc3ae5-dbcc-4039-803a-f8ca0f3eae90"}]} [ 979.866023] env[69796]: DEBUG oslo_concurrency.lockutils [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.526s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.866393] env[69796]: ERROR nova.compute.manager [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 979.866393] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Traceback (most recent call last): [ 979.866393] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 979.866393] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] yield [ 979.866393] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 979.866393] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self.set_inventory_for_provider( [ 979.866393] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 979.866393] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 979.866693] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-28dc3ae5-dbcc-4039-803a-f8ca0f3eae90"}]} [ 979.866693] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] [ 979.866693] env[69796]: ERROR nova.compute.manager [instance: 
d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] During handling of the above exception, another exception occurred: [ 979.866693] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] [ 979.866693] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Traceback (most recent call last): [ 979.866693] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 979.866693] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self._delete_instance(context, instance, bdms) [ 979.866693] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 979.866693] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self._complete_deletion(context, instance) [ 979.866971] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 979.866971] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self._update_resource_tracker(context, instance) [ 979.866971] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 979.866971] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self.rt.update_usage(context, instance, instance.node) [ 979.866971] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 979.866971] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] return f(*args, **kwargs) [ 979.866971] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 979.866971] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self._update(context.elevated(), self.compute_nodes[nodename]) [ 979.866971] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 979.866971] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self._update_to_placement(context, compute_node, startup) [ 979.866971] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 979.866971] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 979.867311] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 979.867311] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] return attempt.get(self._wrap_exception) [ 979.867311] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 979.867311] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] six.reraise(self.value[0], self.value[1], self.value[2]) [ 979.867311] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 979.867311] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] raise value [ 979.867311] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 979.867311] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 979.867311] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 979.867311] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self.reportclient.update_from_provider_tree( [ 979.867311] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 979.867311] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] with catch_all(pd.uuid): [ 979.867311] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 979.867707] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self.gen.throw(typ, value, traceback) [ 979.867707] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 979.867707] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] raise exception.ResourceProviderSyncFailed() [ 979.867707] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 979.867707] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] [ 979.871902] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.923s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.873940] env[69796]: INFO nova.compute.claims [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 980.023504] env[69796]: DEBUG oslo_concurrency.lockutils [req-c444b1fb-676e-4286-b098-951b93465399 req-08f95a3e-28b6-45c0-b06f-88b4cde00eb5 service nova] Releasing lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.378826] env[69796]: DEBUG oslo_concurrency.lockutils [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.201s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.732251] env[69796]: DEBUG nova.compute.manager [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Received event network-changed-a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 980.732460] env[69796]: DEBUG nova.compute.manager [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Refreshing instance network info cache due to event network-changed-a447dcfd-590e-4909-adfb-d7b2edf8b91d. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 980.732679] env[69796]: DEBUG oslo_concurrency.lockutils [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] Acquiring lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.732829] env[69796]: DEBUG oslo_concurrency.lockutils [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] Acquired lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 980.733062] env[69796]: DEBUG nova.network.neutron [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Refreshing network info cache for port a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 980.917715] env[69796]: DEBUG nova.scheduler.client.report [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 980.941151] env[69796]: DEBUG nova.scheduler.client.report [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 980.941151] env[69796]: DEBUG nova.compute.provider_tree [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 980.957237] env[69796]: DEBUG nova.scheduler.client.report [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 980.979782] env[69796]: DEBUG nova.scheduler.client.report [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 
tempest-ImagesOneServerTestJSON-774865120-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 981.257871] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a37d81f-a09f-412d-bc82-dfbfc2919074 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.266466] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc0c889-2609-44f9-a511-43701ca83425 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.310043] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eda2d22-ce73-4466-87d0-dfc458780f6e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.320305] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271f39a8-e0a6-4079-b439-217ea3e596a2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.338874] env[69796]: DEBUG nova.compute.provider_tree [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 981.436146] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquiring lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.436414] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.436646] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquiring lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.436853] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.437041] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.439815] env[69796]: INFO nova.compute.manager [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Terminating instance [ 981.498359] env[69796]: DEBUG nova.network.neutron [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updated VIF entry in instance network info cache for port a447dcfd-590e-4909-adfb-d7b2edf8b91d. {{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 981.498750] env[69796]: DEBUG nova.network.neutron [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updating instance_info_cache with network_info: [{"id": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "address": "fa:16:3e:db:6e:d5", "network": {"id": "1a211050-7974-4d8f-943e-d178eecdb202", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1965542559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a47349b96df54bbb870c223414206b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa447dcfd-59", "ovs_interfaceid": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.868309] env[69796]: ERROR nova.scheduler.client.report [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [req-31141f5e-b380-4677-8f1f-e190f7aef0d5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 
1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-31141f5e-b380-4677-8f1f-e190f7aef0d5"}]} [ 981.868309] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.995s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.868679] env[69796]: ERROR nova.compute.manager [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
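For context on the inventory values in the rejected payload above: Placement treats (total - reserved) * allocation_ratio as the usable capacity of a resource class, while max_unit caps how much a single allocation may consume, so a DISK_GB total of 400 still cannot satisfy any request once max_unit drops to 0, and the schema refuses the value outright. A small illustrative calculation over the values copied from the log (plain Python, not part of the traced code):

    # Values as logged for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3.
    inventories = {
        "VCPU": {"total": 48, "reserved": 0, "max_unit": 16, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "max_unit": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventories.items():
        # Usable capacity after reservation and overcommit.
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        # max_unit bounds a single allocation; 0 means nothing can ever fit.
        print(f"{rc}: capacity={capacity:.0f}, largest single allocation={inv['max_unit']}")
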
[ 981.868679] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Traceback (most recent call last): [ 981.868679] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 981.868679] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] yield [ 981.868679] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 981.868679] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] self.set_inventory_for_provider( [ 981.868679] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 981.868679] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 981.868859] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-31141f5e-b380-4677-8f1f-e190f7aef0d5"}]} [ 981.868859] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] [ 981.868859] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] During handling of the above exception, another exception occurred: [ 981.868859] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] [ 981.868859] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Traceback (most recent call last): [ 981.868859] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 981.868859] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] with self.rt.instance_claim(context, instance, node, allocs, [ 981.868859] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 981.868859] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] return f(*args, **kwargs) [ 981.869078] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 981.869078] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] self._update(elevated, cn) [ 981.869078] env[69796]: ERROR nova.compute.manager [instance: 
40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 981.869078] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] self._update_to_placement(context, compute_node, startup) [ 981.869078] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 981.869078] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 981.869078] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 981.869078] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] return attempt.get(self._wrap_exception) [ 981.869078] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 981.869078] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] six.reraise(self.value[0], self.value[1], self.value[2]) [ 981.869078] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 981.869078] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] raise value [ 981.869078] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 981.869390] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 981.869390] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 981.869390] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] self.reportclient.update_from_provider_tree( [ 981.869390] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 981.869390] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] with catch_all(pd.uuid): [ 981.869390] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 981.869390] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] self.gen.throw(typ, value, traceback) [ 981.869390] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 981.869390] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] raise exception.ResourceProviderSyncFailed() [ 981.869390] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 981.869390] env[69796]: ERROR nova.compute.manager [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] [ 981.869618] env[69796]: DEBUG nova.compute.utils [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 981.873178] env[69796]: DEBUG nova.compute.manager [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Build of instance 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 981.873178] env[69796]: DEBUG nova.compute.manager [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 981.873178] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Acquiring lock "refresh_cache-40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.873178] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Acquired lock "refresh_cache-40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.873677] env[69796]: DEBUG nova.network.neutron [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 981.897752] env[69796]: DEBUG oslo_concurrency.lockutils [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.898152] env[69796]: DEBUG oslo_concurrency.lockutils [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.898361] env[69796]: DEBUG oslo_concurrency.lockutils [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 
tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.898538] env[69796]: INFO nova.compute.manager [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Successfully reverted task state from None on failure for instance. [ 981.902915] env[69796]: ERROR oslo_messaging.rpc.server [None req-839f9a96-7f4d-42c2-9daa-ad96e05833bd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 981.902915] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 981.902915] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 981.902915] env[69796]: ERROR oslo_messaging.rpc.server yield [ 981.902915] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 981.902915] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 981.902915] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 981.902915] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 981.902915] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-28dc3ae5-dbcc-4039-803a-f8ca0f3eae90"}]} [ 981.902915] env[69796]: ERROR oslo_messaging.rpc.server [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 981.903214] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 981.903644] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 
981.904102] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 981.904102] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 981.904571] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 981.905019] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 981.905019] env[69796]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 981.905019] env[69796]: ERROR oslo_messaging.rpc.server File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 981.905019] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 981.905019] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 981.905019] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 981.905019] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 981.905019] env[69796]: ERROR oslo_messaging.rpc.server [ 981.948173] env[69796]: DEBUG nova.compute.manager [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Start destroying the instance on the hypervisor. 
{{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 981.948173] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 981.948173] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff1f398-71ca-4d3f-ac51-81d7cedc35a1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.958318] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.958318] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f50e5705-e93c-47f0-95e1-09071c4060a3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.965635] env[69796]: DEBUG oslo_vmware.api [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 981.965635] env[69796]: value = "task-4234457" [ 981.965635] env[69796]: _type = "Task" [ 981.965635] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.974404] env[69796]: DEBUG oslo_vmware.api [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234457, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.005271] env[69796]: DEBUG oslo_concurrency.lockutils [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] Releasing lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.005561] env[69796]: DEBUG nova.compute.manager [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Received event network-changed-a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 982.005742] env[69796]: DEBUG nova.compute.manager [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Refreshing instance network info cache due to event network-changed-a447dcfd-590e-4909-adfb-d7b2edf8b91d. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 982.005972] env[69796]: DEBUG oslo_concurrency.lockutils [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] Acquiring lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.006150] env[69796]: DEBUG oslo_concurrency.lockutils [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] Acquired lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.006315] env[69796]: DEBUG nova.network.neutron [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Refreshing network info cache for port a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 982.394664] env[69796]: DEBUG nova.network.neutron [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 982.470997] env[69796]: DEBUG nova.network.neutron [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.477284] env[69796]: DEBUG oslo_vmware.api [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234457, 'name': PowerOffVM_Task, 'duration_secs': 0.223912} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.477565] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 982.477680] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 982.477930] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66c50df0-4772-40ef-9721-52f8bad0660d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.545694] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 982.545694] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Deleting contents of the VM from datastore datastore1 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 982.545899] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Deleting the datastore file [datastore1] 983f57b3-3bfb-41ce-a924-d48c72d25c9f {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 982.546196] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d23fb50-be6e-4fbd-b257-80c8fda21d01 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.555414] env[69796]: DEBUG oslo_vmware.api [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for the task: (returnval){ [ 982.555414] env[69796]: value = "task-4234459" [ 982.555414] env[69796]: _type = "Task" [ 982.555414] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.563912] env[69796]: DEBUG oslo_vmware.api [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234459, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.712920] env[69796]: DEBUG nova.network.neutron [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updated VIF entry in instance network info cache for port a447dcfd-590e-4909-adfb-d7b2edf8b91d. {{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 982.713305] env[69796]: DEBUG nova.network.neutron [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updating instance_info_cache with network_info: [{"id": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "address": "fa:16:3e:db:6e:d5", "network": {"id": "1a211050-7974-4d8f-943e-d178eecdb202", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1965542559-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a47349b96df54bbb870c223414206b63", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e05affa-2640-435e-a124-0ee8a6ab1152", "external-id": "nsx-vlan-transportzone-839", "segmentation_id": 839, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa447dcfd-59", "ovs_interfaceid": "a447dcfd-590e-4909-adfb-d7b2edf8b91d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.974857] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Releasing lock "refresh_cache-40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.975166] env[69796]: DEBUG nova.compute.manager [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 982.975579] env[69796]: DEBUG nova.compute.manager [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 982.975579] env[69796]: DEBUG nova.network.neutron [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 982.992069] env[69796]: DEBUG nova.network.neutron [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 983.067725] env[69796]: DEBUG oslo_vmware.api [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Task: {'id': task-4234459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193667} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.068052] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 983.068246] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Deleted contents of the VM from datastore datastore1 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 983.068422] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 983.068592] env[69796]: INFO nova.compute.manager [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Took 1.12 seconds to destroy the instance on the hypervisor. [ 983.068832] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 983.069047] env[69796]: DEBUG nova.compute.manager [-] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 983.069151] env[69796]: DEBUG nova.network.neutron [-] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 983.216170] env[69796]: DEBUG oslo_concurrency.lockutils [req-e32068fd-95e6-406d-b242-c75dbe3af511 req-a9066fa3-edac-47cd-8c3c-b3c88446f407 service nova] Releasing lock "refresh_cache-983f57b3-3bfb-41ce-a924-d48c72d25c9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.336850] env[69796]: DEBUG nova.compute.manager [req-c60099a1-49dd-4d4b-815b-398531e533e0 req-b5ecfd47-7dcc-49e1-8b9f-d03984770b56 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Received event network-vif-deleted-a447dcfd-590e-4909-adfb-d7b2edf8b91d {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 983.337096] env[69796]: INFO nova.compute.manager [req-c60099a1-49dd-4d4b-815b-398531e533e0 req-b5ecfd47-7dcc-49e1-8b9f-d03984770b56 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Neutron deleted interface a447dcfd-590e-4909-adfb-d7b2edf8b91d; detaching it from the instance and deleting it from the info cache [ 983.337333] env[69796]: DEBUG nova.network.neutron [req-c60099a1-49dd-4d4b-815b-398531e533e0 req-b5ecfd47-7dcc-49e1-8b9f-d03984770b56 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.496652] env[69796]: DEBUG nova.network.neutron [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.541882] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Acquiring lock "c751e217-109b-4904-baba-60c0b9b856a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.542115] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Lock "c751e217-109b-4904-baba-60c0b9b856a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.820035] env[69796]: DEBUG nova.network.neutron [-] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.840589] env[69796]: DEBUG 
oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c523b803-2810-4473-80e9-21e86d83e799 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.856011] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db4d57e-5590-4b17-bb0b-2535e6869e1c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.880757] env[69796]: DEBUG nova.compute.manager [req-c60099a1-49dd-4d4b-815b-398531e533e0 req-b5ecfd47-7dcc-49e1-8b9f-d03984770b56 service nova] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Detach interface failed, port_id=a447dcfd-590e-4909-adfb-d7b2edf8b91d, reason: Instance 983f57b3-3bfb-41ce-a924-d48c72d25c9f could not be found. {{(pid=69796) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 983.998923] env[69796]: INFO nova.compute.manager [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] [instance: 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0] Took 1.02 seconds to deallocate network for instance. [ 984.045466] env[69796]: DEBUG nova.compute.manager [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 984.322843] env[69796]: INFO nova.compute.manager [-] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Took 1.25 seconds to deallocate network for instance. [ 984.577461] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.577806] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.579281] env[69796]: INFO nova.compute.claims [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 984.829224] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.028704] env[69796]: INFO nova.scheduler.client.report [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] 
Deleted allocations for instance 40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0 [ 985.537024] env[69796]: DEBUG oslo_concurrency.lockutils [None req-7843de2d-9730-40ee-8829-cb340d9cca1c tempest-ImagesOneServerTestJSON-774865120 tempest-ImagesOneServerTestJSON-774865120-project-member] Lock "40a5fa7a-d1a5-4ff4-8a23-f9b95c27d5e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 7.612s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.611558] env[69796]: DEBUG nova.scheduler.client.report [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 985.629366] env[69796]: DEBUG nova.scheduler.client.report [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 985.629641] env[69796]: DEBUG nova.compute.provider_tree [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 985.640769] env[69796]: DEBUG nova.scheduler.client.report [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 985.660449] env[69796]: DEBUG nova.scheduler.client.report [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 985.863165] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081f078f-20f6-4557-ab98-489fb620c67f 
{{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.872104] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3161457e-5985-472e-b0de-1cfa7c308936 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.906996] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ebe6e3-9e97-4502-b4e3-33f9a2eb35dc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.915747] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b675368-6ed4-4212-8b23-1cdff38496d9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.930292] env[69796]: DEBUG nova.compute.provider_tree [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 986.452232] env[69796]: ERROR nova.scheduler.client.report [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [req-71c3a167-cfe5-49a7-b533-34ada55ec67e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-71c3a167-cfe5-49a7-b533-34ada55ec67e"}]} [ 986.452677] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.875s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.453514] env[69796]: ERROR nova.compute.manager [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 986.453514] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] Traceback (most recent call last): [ 986.453514] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 986.453514] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] yield [ 986.453514] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 986.453514] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] self.set_inventory_for_provider( [ 986.453514] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 986.453514] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 986.453782] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-71c3a167-cfe5-49a7-b533-34ada55ec67e"}]} [ 986.453782] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] [ 986.453782] env[69796]: ERROR nova.compute.manager [instance: 
c751e217-109b-4904-baba-60c0b9b856a6] During handling of the above exception, another exception occurred: [ 986.453782] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] [ 986.453782] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] Traceback (most recent call last): [ 986.453782] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 986.453782] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] with self.rt.instance_claim(context, instance, node, allocs, [ 986.453782] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 986.453782] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] return f(*args, **kwargs) [ 986.454056] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 986.454056] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] self._update(elevated, cn) [ 986.454056] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 986.454056] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] self._update_to_placement(context, compute_node, startup) [ 986.454056] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 986.454056] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 986.454056] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 986.454056] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] return attempt.get(self._wrap_exception) [ 986.454056] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 986.454056] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] six.reraise(self.value[0], self.value[1], self.value[2]) [ 986.454056] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 986.454056] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] raise value [ 986.454056] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 986.454365] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 986.454365] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in 
_update_to_placement [ 986.454365] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] self.reportclient.update_from_provider_tree( [ 986.454365] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 986.454365] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] with catch_all(pd.uuid): [ 986.454365] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 986.454365] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] self.gen.throw(typ, value, traceback) [ 986.454365] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 986.454365] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] raise exception.ResourceProviderSyncFailed() [ 986.454365] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 986.454365] env[69796]: ERROR nova.compute.manager [instance: c751e217-109b-4904-baba-60c0b9b856a6] [ 986.454619] env[69796]: DEBUG nova.compute.utils [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 986.455417] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.626s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.455635] env[69796]: DEBUG nova.objects.instance [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Lazy-loading 'resources' on Instance uuid 983f57b3-3bfb-41ce-a924-d48c72d25c9f {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 986.456956] env[69796]: DEBUG nova.compute.manager [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] Build of instance c751e217-109b-4904-baba-60c0b9b856a6 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 986.457361] env[69796]: DEBUG nova.compute.manager [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 986.457585] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Acquiring lock "refresh_cache-c751e217-109b-4904-baba-60c0b9b856a6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.457806] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Acquired lock "refresh_cache-c751e217-109b-4904-baba-60c0b9b856a6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.457984] env[69796]: DEBUG nova.network.neutron [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 986.978382] env[69796]: DEBUG nova.scheduler.client.report [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 986.981176] env[69796]: DEBUG nova.network.neutron [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 986.993743] env[69796]: DEBUG nova.scheduler.client.report [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 986.993975] env[69796]: DEBUG nova.compute.provider_tree [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 987.008322] env[69796]: DEBUG nova.scheduler.client.report [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 987.028882] env[69796]: DEBUG nova.scheduler.client.report [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 987.068103] env[69796]: DEBUG nova.network.neutron [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.233910] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8fca54-2a29-4f71-a6b4-50748b2cc7db {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.242438] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74e0490-ef0e-4e20-9396-beab89e4da51 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.274926] 
env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140f1dd9-adff-4427-bce7-078fa5ff6aec {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.283441] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3fa9c2-6995-4ff5-a117-59aeaaa83941 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.298735] env[69796]: DEBUG nova.compute.provider_tree [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 987.571101] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Releasing lock "refresh_cache-c751e217-109b-4904-baba-60c0b9b856a6" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.571101] env[69796]: DEBUG nova.compute.manager [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 987.571101] env[69796]: DEBUG nova.compute.manager [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 987.571513] env[69796]: DEBUG nova.network.neutron [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 987.589478] env[69796]: DEBUG nova.network.neutron [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 987.821412] env[69796]: ERROR nova.scheduler.client.report [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [req-a29370e4-2b52-4801-8b4e-03d7a812fc8f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a29370e4-2b52-4801-8b4e-03d7a812fc8f"}]} [ 987.821566] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.366s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.822449] env[69796]: ERROR nova.compute.manager [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 987.822449] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Traceback (most recent call last): [ 987.822449] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 987.822449] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] yield [ 987.822449] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 987.822449] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] self.set_inventory_for_provider( [ 987.822449] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 987.822449] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 987.822654] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a29370e4-2b52-4801-8b4e-03d7a812fc8f"}]} [ 987.822654] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] [ 987.822654] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] During handling of the above exception, another exception occurred: [ 987.822654] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] [ 987.822654] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Traceback (most recent call last): [ 987.822654] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 987.822654] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] self._delete_instance(context, instance, bdms) [ 987.822654] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 987.822654] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] self._complete_deletion(context, instance) [ 987.822876] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 987.822876] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] self._update_resource_tracker(context, instance) [ 987.822876] env[69796]: ERROR nova.compute.manager [instance: 
983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 987.822876] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] self.rt.update_usage(context, instance, instance.node) [ 987.822876] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 987.822876] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] return f(*args, **kwargs) [ 987.822876] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 987.822876] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] self._update(context.elevated(), self.compute_nodes[nodename]) [ 987.822876] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 987.822876] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] self._update_to_placement(context, compute_node, startup) [ 987.822876] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 987.822876] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 987.823156] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 987.823156] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] return attempt.get(self._wrap_exception) [ 987.823156] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 987.823156] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 987.823156] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 987.823156] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] raise value [ 987.823156] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 987.823156] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 987.823156] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 987.823156] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] self.reportclient.update_from_provider_tree( [ 987.823156] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 987.823156] 
env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] with catch_all(pd.uuid): [ 987.823156] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 987.823478] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] self.gen.throw(typ, value, traceback) [ 987.823478] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 987.823478] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] raise exception.ResourceProviderSyncFailed() [ 987.823478] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 987.823478] env[69796]: ERROR nova.compute.manager [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] [ 988.092389] env[69796]: DEBUG nova.network.neutron [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.327224] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.891s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.596058] env[69796]: INFO nova.compute.manager [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] [instance: c751e217-109b-4904-baba-60c0b9b856a6] Took 1.02 seconds to deallocate network for instance. 
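The 400 returned by placement in the traceback above is an ordinary JSON-Schema rejection: the compute host reported a DISK_GB inventory with max_unit = 0, while the inventory schema requires max_unit to be at least 1. A minimal, self-contained sketch of that check — the max_unit constraint is copied from the error detail; the surrounding schema structure and the use of the jsonschema library are illustrative assumptions, not placement's actual code:

# Reproduces the validation failure quoted in the log: DISK_GB max_unit of 0
# checked against a schema whose max_unit property requires a minimum of 1.
from jsonschema import ValidationError, validate

INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        # Constraint quoted verbatim in the placement error detail.
                        "max_unit": {"type": "integer",
                                     "maximum": 2147483647,
                                     "minimum": 1},
                    },
                },
            },
        },
    },
}

payload = {"inventories": {"DISK_GB": {"total": 400, "max_unit": 0}}}

try:
    validate(instance=payload, schema=INVENTORY_SCHEMA)
except ValidationError as exc:
    # Prints "0 is less than the minimum of 1", matching the log message.
    print(exc.message)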
[ 989.634064] env[69796]: INFO nova.scheduler.client.report [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Deleted allocations for instance c751e217-109b-4904-baba-60c0b9b856a6 [ 989.838918] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.839240] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.839845] env[69796]: DEBUG oslo_concurrency.lockutils [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.839845] env[69796]: INFO nova.compute.manager [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] [instance: 983f57b3-3bfb-41ce-a924-d48c72d25c9f] Successfully reverted task state from None on failure for instance. [ 989.843210] env[69796]: ERROR oslo_messaging.rpc.server [None req-c90887db-f8ef-4d34-9794-4801f13ee2c7 tempest-ServerRescueTestJSONUnderV235-1689322081 tempest-ServerRescueTestJSONUnderV235-1689322081-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 989.843210] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 989.843210] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 989.843210] env[69796]: ERROR oslo_messaging.rpc.server yield [ 989.843210] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 989.843210] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 989.843210] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 989.843210] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 989.843210] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a29370e4-2b52-4801-8b4e-03d7a812fc8f"}]} [ 989.843210] env[69796]: ERROR oslo_messaging.rpc.server [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 989.843548] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 989.844028] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 
989.844509] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 989.844509] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 989.844949] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 989.845505] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 989.845505] env[69796]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 989.845505] env[69796]: ERROR oslo_messaging.rpc.server File 
"/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 989.845505] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 989.845505] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 989.845505] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 989.845505] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 989.845505] env[69796]: ERROR oslo_messaging.rpc.server [ 990.146887] env[69796]: DEBUG oslo_concurrency.lockutils [None req-f4bf5309-4e03-4e8d-b1b2-69de1b61b003 tempest-ImagesNegativeTestJSON-1361388712 tempest-ImagesNegativeTestJSON-1361388712-project-member] Lock "c751e217-109b-4904-baba-60c0b9b856a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 6.604s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.140941] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Acquiring lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.141326] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.141445] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Acquiring lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.141608] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.141781] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.144447] env[69796]: INFO nova.compute.manager [None 
req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Terminating instance [ 1000.647863] env[69796]: DEBUG nova.compute.manager [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Start destroying the instance on the hypervisor. {{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1000.648136] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1000.650278] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25039bec-59ff-44a4-b449-72b0b044c1a2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.656857] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1000.657100] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed8d1c20-762e-4738-bdc5-239569657418 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.663364] env[69796]: DEBUG oslo_vmware.api [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Waiting for the task: (returnval){ [ 1000.663364] env[69796]: value = "task-4234460" [ 1000.663364] env[69796]: _type = "Task" [ 1000.663364] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.672371] env[69796]: DEBUG oslo_vmware.api [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234460, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.173427] env[69796]: DEBUG oslo_vmware.api [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234460, 'name': PowerOffVM_Task, 'duration_secs': 0.201038} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.173831] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1001.173907] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1001.174125] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10edb176-4137-4a2f-9dc6-eaef6e9cbbd9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.231507] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1001.231724] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1001.231914] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Deleting the datastore file [datastore2] e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.232219] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b972612a-de6b-42cd-8a6f-f0085f5944c3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.239845] env[69796]: DEBUG oslo_vmware.api [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Waiting for the task: (returnval){ [ 1001.239845] env[69796]: value = "task-4234462" [ 1001.239845] env[69796]: _type = "Task" [ 1001.239845] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.250181] env[69796]: DEBUG oslo_vmware.api [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234462, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.749606] env[69796]: DEBUG oslo_vmware.api [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Task: {'id': task-4234462, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132283} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.749980] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1001.750319] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1001.750561] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1001.750734] env[69796]: INFO nova.compute.manager [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1001.750987] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.751195] env[69796]: DEBUG nova.compute.manager [-] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1001.751293] env[69796]: DEBUG nova.network.neutron [-] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1002.210296] env[69796]: DEBUG nova.compute.manager [req-a957d72f-1379-4d74-95e8-36a9bc44d5b8 req-7ce574e4-4f37-4c27-806f-9d12091b4118 service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Received event network-vif-deleted-76652ff3-72d0-410c-abd6-d0e0e4bfcdc7 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1002.210794] env[69796]: INFO nova.compute.manager [req-a957d72f-1379-4d74-95e8-36a9bc44d5b8 req-7ce574e4-4f37-4c27-806f-9d12091b4118 service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Neutron deleted interface 76652ff3-72d0-410c-abd6-d0e0e4bfcdc7; detaching it from the instance and deleting it from the info cache [ 1002.210794] env[69796]: DEBUG nova.network.neutron [req-a957d72f-1379-4d74-95e8-36a9bc44d5b8 req-7ce574e4-4f37-4c27-806f-9d12091b4118 service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.689487] env[69796]: DEBUG nova.network.neutron [-] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.712887] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17c3237c-59b7-4032-a73b-c8a9b4554115 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.724291] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce5f523-28b9-423a-8485-ddbee86820d1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.748757] env[69796]: DEBUG nova.compute.manager [req-a957d72f-1379-4d74-95e8-36a9bc44d5b8 req-7ce574e4-4f37-4c27-806f-9d12091b4118 service nova] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Detach interface failed, port_id=76652ff3-72d0-410c-abd6-d0e0e4bfcdc7, reason: Instance e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7 could not be found. {{(pid=69796) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1003.192698] env[69796]: INFO nova.compute.manager [-] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Took 1.44 seconds to deallocate network for instance. 
[ 1003.699545] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.699894] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.700075] env[69796]: DEBUG nova.objects.instance [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Lazy-loading 'resources' on Instance uuid e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1004.221163] env[69796]: DEBUG nova.scheduler.client.report [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1004.235588] env[69796]: DEBUG nova.scheduler.client.report [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1004.235805] env[69796]: DEBUG nova.compute.provider_tree [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1004.247066] env[69796]: DEBUG nova.scheduler.client.report [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1004.264885] env[69796]: DEBUG 
nova.scheduler.client.report [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1004.412448] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42914f2a-9e72-4a1a-8980-6c213d6f4ba4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.420454] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02894ee5-978f-4860-bcdf-9ab0660ea3cd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.451878] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660d8707-f039-499a-b14e-7a13aeadbaa3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.459582] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3ea4c4-6b29-4f39-bad4-977ae0a1afe7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.473330] env[69796]: DEBUG nova.compute.provider_tree [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1004.993852] env[69796]: ERROR nova.scheduler.client.report [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [req-baa0661f-599c-440c-be14-8e955ad1c58d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-baa0661f-599c-440c-be14-8e955ad1c58d"}]} [ 1004.994240] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.294s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.995241] env[69796]: ERROR nova.compute.manager [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1004.995241] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Traceback (most recent call last): [ 1004.995241] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1004.995241] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] yield [ 1004.995241] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1004.995241] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] self.set_inventory_for_provider( [ 1004.995241] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1004.995241] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1004.995528] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-baa0661f-599c-440c-be14-8e955ad1c58d"}]} [ 1004.995528] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] [ 1004.995528] env[69796]: ERROR 
nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] During handling of the above exception, another exception occurred: [ 1004.995528] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] [ 1004.995528] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Traceback (most recent call last): [ 1004.995528] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 1004.995528] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] self._delete_instance(context, instance, bdms) [ 1004.995528] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 1004.995528] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] self._complete_deletion(context, instance) [ 1004.995764] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 1004.995764] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] self._update_resource_tracker(context, instance) [ 1004.995764] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 1004.995764] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] self.rt.update_usage(context, instance, instance.node) [ 1004.995764] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1004.995764] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] return f(*args, **kwargs) [ 1004.995764] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 1004.995764] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] self._update(context.elevated(), self.compute_nodes[nodename]) [ 1004.995764] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1004.995764] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] self._update_to_placement(context, compute_node, startup) [ 1004.995764] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1004.995764] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1004.996121] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1004.996121] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] return attempt.get(self._wrap_exception) [ 1004.996121] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1004.996121] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] six.reraise(self.value[0], self.value[1], self.value[2]) [ 1004.996121] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1004.996121] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] raise value [ 1004.996121] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1004.996121] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1004.996121] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1004.996121] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] self.reportclient.update_from_provider_tree( [ 1004.996121] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1004.996121] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] with catch_all(pd.uuid): [ 1004.996121] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1004.996624] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] self.gen.throw(typ, value, traceback) [ 1004.996624] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1004.996624] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] raise exception.ResourceProviderSyncFailed() [ 1004.996624] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 1004.996624] env[69796]: ERROR nova.compute.manager [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] [ 1005.499826] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.358s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.158640] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.158640] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.662021] env[69796]: DEBUG nova.compute.utils [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1007.011359] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.011828] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.011908] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.012092] env[69796]: INFO nova.compute.manager [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] [instance: e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7] Successfully reverted task state from None on failure for instance. 
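At the wire level, what fails in the entries above is a PUT of the full inventory dict (with DISK_GB max_unit 0) to placement's inventories endpoint for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. A hedged sketch of replaying that request directly, assuming a reachable placement endpoint and a valid token; PLACEMENT_URL, TOKEN, the generation value, and the microversion header are placeholders — only the provider UUID and the inventory values come from the log:

# Illustrative replay of the rejected inventory update; not Nova's code path.
# PLACEMENT_URL, TOKEN, the generation and the microversion are assumptions.
import requests

PLACEMENT_URL = "http://placement.example/placement"    # assumption
PROVIDER_UUID = "dc1d576d-f9a3-4db7-b636-fdf2129d2ab3"  # from the log
TOKEN = "<keystone-token>"                               # assumption

body = {
    "resource_provider_generation": 1,                   # placeholder
    "inventories": {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1,
                      "allocation_ratio": 1.0},
        # max_unit 0 is the value placement rejects with the 400 seen above.
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 0,
                    "step_size": 1, "allocation_ratio": 1.0},
    },
}

resp = requests.put(
    f"{PLACEMENT_URL}/resource_providers/{PROVIDER_UUID}/inventories",
    json=body,
    headers={"X-Auth-Token": TOKEN,
             "OpenStack-API-Version": "placement 1.39"},  # assumption
)
print(resp.status_code)                    # expected: 400
print(resp.json()["errors"][0]["detail"])  # "... 0 is less than the minimum of 1 ..."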
[ 1007.016275] env[69796]: ERROR oslo_messaging.rpc.server [None req-6a2bd0a5-dab6-4f6e-a2b6-ac18f2bb2d98 tempest-ServersTestFqdnHostnames-265094891 tempest-ServersTestFqdnHostnames-265094891-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1007.016275] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1007.016275] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1007.016275] env[69796]: ERROR oslo_messaging.rpc.server yield [ 1007.016275] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1007.016275] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 1007.016275] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1007.016275] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1007.016275] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-baa0661f-599c-440c-be14-8e955ad1c58d"}]} [ 1007.016275] env[69796]: ERROR oslo_messaging.rpc.server [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, 
in __exit__ [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1007.016652] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1007.017139] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 1007.017640] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1007.018132] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 1007.018575] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1007.018575] env[69796]: ERROR oslo_messaging.rpc.server with catch_all(pd.uuid): [ 1007.018575] env[69796]: ERROR oslo_messaging.rpc.server File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1007.018575] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 1007.018575] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1007.018575] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 1007.018575] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1007.018575] env[69796]: ERROR oslo_messaging.rpc.server [ 1007.165480] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.231272] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.231648] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.231972] env[69796]: INFO nova.compute.manager [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Attaching volume 2327c06a-8e20-4ca7-b545-6cbea3a21e0b to /dev/sdb [ 1008.268027] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e277f7-36fd-4d43-a38c-8c497f606b5f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.277097] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472b465b-3fbf-4525-bbbb-88b6327e5a87 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.290393] env[69796]: DEBUG nova.virt.block_device [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 
tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Updating existing volume attachment record: 9c7c3a50-4c7f-4dee-806d-6e8808052d80 {{(pid=69796) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1012.125288] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Acquiring lock "f566b89a-cb28-4a80-898e-bb44f1301b30" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.125596] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Lock "f566b89a-cb28-4a80-898e-bb44f1301b30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.630810] env[69796]: DEBUG nova.compute.manager [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1012.838528] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Volume attach. 
Driver type: vmdk {{(pid=69796) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1012.838528] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837832', 'volume_id': '2327c06a-8e20-4ca7-b545-6cbea3a21e0b', 'name': 'volume-2327c06a-8e20-4ca7-b545-6cbea3a21e0b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '78da661c-9020-40d1-b2e7-bc844c0bdbb0', 'attached_at': '', 'detached_at': '', 'volume_id': '2327c06a-8e20-4ca7-b545-6cbea3a21e0b', 'serial': '2327c06a-8e20-4ca7-b545-6cbea3a21e0b'} {{(pid=69796) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1012.839737] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-934bf04b-4cb6-4e84-87ba-0ec68ec2faee {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.856641] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2fddb1f-9a50-4df6-b8bf-ad1d91ef7fb7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.883790] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Reconfiguring VM instance instance-00000055 to attach disk [localhost-esx-install-datastore] volume-2327c06a-8e20-4ca7-b545-6cbea3a21e0b/volume-2327c06a-8e20-4ca7-b545-6cbea3a21e0b.vmdk or device None with type thin {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1012.884221] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f7112a7-3f20-4a81-81f2-6c6e54f600b9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.903383] env[69796]: DEBUG oslo_vmware.api [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1012.903383] env[69796]: value = "task-4234467" [ 1012.903383] env[69796]: _type = "Task" [ 1012.903383] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.915287] env[69796]: DEBUG oslo_vmware.api [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234467, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.152891] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.153217] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.154722] env[69796]: INFO nova.compute.claims [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1013.413594] env[69796]: DEBUG oslo_vmware.api [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234467, 'name': ReconfigVM_Task, 'duration_secs': 0.211563} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.413887] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Reconfigured VM instance instance-00000055 to attach disk [localhost-esx-install-datastore] volume-2327c06a-8e20-4ca7-b545-6cbea3a21e0b/volume-2327c06a-8e20-4ca7-b545-6cbea3a21e0b.vmdk or device None with type thin {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1013.419047] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60abfee8-99ce-44e5-a06e-0eae9bc2bb3e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.436634] env[69796]: DEBUG oslo_vmware.api [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1013.436634] env[69796]: value = "task-4234468" [ 1013.436634] env[69796]: _type = "Task" [ 1013.436634] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.446298] env[69796]: DEBUG oslo_vmware.api [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234468, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.947841] env[69796]: DEBUG oslo_vmware.api [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234468, 'name': ReconfigVM_Task, 'duration_secs': 0.130911} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.948272] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837832', 'volume_id': '2327c06a-8e20-4ca7-b545-6cbea3a21e0b', 'name': 'volume-2327c06a-8e20-4ca7-b545-6cbea3a21e0b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '78da661c-9020-40d1-b2e7-bc844c0bdbb0', 'attached_at': '', 'detached_at': '', 'volume_id': '2327c06a-8e20-4ca7-b545-6cbea3a21e0b', 'serial': '2327c06a-8e20-4ca7-b545-6cbea3a21e0b'} {{(pid=69796) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1014.180154] env[69796]: DEBUG nova.scheduler.client.report [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1014.194610] env[69796]: DEBUG nova.scheduler.client.report [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1014.194843] env[69796]: DEBUG nova.compute.provider_tree [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1014.205754] env[69796]: DEBUG nova.scheduler.client.report [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 
tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1014.226147] env[69796]: DEBUG nova.scheduler.client.report [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1014.399799] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc535c8-ac60-4c4f-8cc7-ecb62af64499 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.407662] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22dad845-50fb-421a-8510-1abedd5ee1a2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.437472] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f77649-c3a8-479a-9547-c482e67161fd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.445244] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66d1996-bc7e-4948-a10a-e5c95cd8d2d5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.460532] env[69796]: DEBUG nova.compute.provider_tree [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1014.981772] env[69796]: ERROR nova.scheduler.client.report [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [req-2d0600ac-211c-4a49-928c-728b23c65006] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2d0600ac-211c-4a49-928c-728b23c65006"}]} [ 1014.982219] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.829s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.982862] env[69796]: ERROR nova.compute.manager [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1014.982862] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Traceback (most recent call last): [ 1014.982862] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1014.982862] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] yield [ 1014.982862] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1014.982862] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] self.set_inventory_for_provider( [ 1014.982862] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1014.982862] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1014.983144] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2d0600ac-211c-4a49-928c-728b23c65006"}]} [ 1014.983144] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] [ 
1014.983144] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] During handling of the above exception, another exception occurred: [ 1014.983144] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] [ 1014.983144] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Traceback (most recent call last): [ 1014.983144] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 1014.983144] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] with self.rt.instance_claim(context, instance, node, allocs, [ 1014.983144] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1014.983144] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] return f(*args, **kwargs) [ 1014.983381] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 1014.983381] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] self._update(elevated, cn) [ 1014.983381] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1014.983381] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] self._update_to_placement(context, compute_node, startup) [ 1014.983381] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1014.983381] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1014.983381] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1014.983381] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] return attempt.get(self._wrap_exception) [ 1014.983381] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1014.983381] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] six.reraise(self.value[0], self.value[1], self.value[2]) [ 1014.983381] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1014.983381] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] raise value [ 1014.983381] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1014.983668] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1014.983668] env[69796]: ERROR nova.compute.manager [instance: 
f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1014.983668] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] self.reportclient.update_from_provider_tree( [ 1014.983668] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1014.983668] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] with catch_all(pd.uuid): [ 1014.983668] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1014.983668] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] self.gen.throw(typ, value, traceback) [ 1014.983668] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1014.983668] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] raise exception.ResourceProviderSyncFailed() [ 1014.983668] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1014.983668] env[69796]: ERROR nova.compute.manager [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] [ 1014.983921] env[69796]: DEBUG nova.compute.utils [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1014.985717] env[69796]: DEBUG nova.compute.manager [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Build of instance f566b89a-cb28-4a80-898e-bb44f1301b30 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1014.986176] env[69796]: DEBUG nova.compute.manager [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1014.986450] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Acquiring lock "refresh_cache-f566b89a-cb28-4a80-898e-bb44f1301b30" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.986644] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Acquired lock "refresh_cache-f566b89a-cb28-4a80-898e-bb44f1301b30" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.986839] env[69796]: DEBUG nova.network.neutron [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1014.993046] env[69796]: DEBUG nova.objects.instance [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lazy-loading 'flavor' on Instance uuid 78da661c-9020-40d1-b2e7-bc844c0bdbb0 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.497818] env[69796]: DEBUG oslo_concurrency.lockutils [None req-3003eafe-f628-49f0-8ed0-60a4920dda89 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.266s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.506524] env[69796]: DEBUG nova.network.neutron [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1015.575110] env[69796]: DEBUG nova.network.neutron [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.785191] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.785485] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.078420] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Releasing lock "refresh_cache-f566b89a-cb28-4a80-898e-bb44f1301b30" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.078666] env[69796]: DEBUG nova.compute.manager [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1016.078852] env[69796]: DEBUG nova.compute.manager [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1016.079041] env[69796]: DEBUG nova.network.neutron [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1016.095360] env[69796]: DEBUG nova.network.neutron [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1016.288415] env[69796]: INFO nova.compute.manager [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Detaching volume 2327c06a-8e20-4ca7-b545-6cbea3a21e0b [ 1016.321036] env[69796]: INFO nova.virt.block_device [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Attempting to driver detach volume 2327c06a-8e20-4ca7-b545-6cbea3a21e0b from mountpoint /dev/sdb [ 1016.321036] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Volume detach. Driver type: vmdk {{(pid=69796) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1016.321203] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837832', 'volume_id': '2327c06a-8e20-4ca7-b545-6cbea3a21e0b', 'name': 'volume-2327c06a-8e20-4ca7-b545-6cbea3a21e0b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '78da661c-9020-40d1-b2e7-bc844c0bdbb0', 'attached_at': '', 'detached_at': '', 'volume_id': '2327c06a-8e20-4ca7-b545-6cbea3a21e0b', 'serial': '2327c06a-8e20-4ca7-b545-6cbea3a21e0b'} {{(pid=69796) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1016.322088] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d61b81-0a08-49e8-bf6d-00b0f9c66d51 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.344593] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb384558-7df5-4ec7-ba07-22c558f877ea {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.352260] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07fed20-a859-4563-b0ca-cc8ce9c12db9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.373333] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-555c8a2b-ec10-4897-a08e-3e5a58448872 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.390211] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] The volume has not been displaced from its original location: [localhost-esx-install-datastore] volume-2327c06a-8e20-4ca7-b545-6cbea3a21e0b/volume-2327c06a-8e20-4ca7-b545-6cbea3a21e0b.vmdk. No consolidation needed. 
{{(pid=69796) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1016.395647] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Reconfiguring VM instance instance-00000055 to detach disk 2001 {{(pid=69796) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1016.395997] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fed8b608-3829-4dfd-8a8c-dfa02fe0e58c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.414692] env[69796]: DEBUG oslo_vmware.api [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1016.414692] env[69796]: value = "task-4234469" [ 1016.414692] env[69796]: _type = "Task" [ 1016.414692] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.423242] env[69796]: DEBUG oslo_vmware.api [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234469, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.598878] env[69796]: DEBUG nova.network.neutron [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.925646] env[69796]: DEBUG oslo_vmware.api [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234469, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.102382] env[69796]: INFO nova.compute.manager [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] [instance: f566b89a-cb28-4a80-898e-bb44f1301b30] Took 1.02 seconds to deallocate network for instance. [ 1017.426959] env[69796]: DEBUG oslo_vmware.api [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234469, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.927407] env[69796]: DEBUG oslo_vmware.api [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234469, 'name': ReconfigVM_Task, 'duration_secs': 1.212312} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.927777] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Reconfigured VM instance instance-00000055 to detach disk 2001 {{(pid=69796) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1017.932361] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-274a07a5-99b8-4964-ab44-0349420fcfbf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.947567] env[69796]: DEBUG oslo_vmware.api [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1017.947567] env[69796]: value = "task-4234470" [ 1017.947567] env[69796]: _type = "Task" [ 1017.947567] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.955831] env[69796]: DEBUG oslo_vmware.api [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234470, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.136652] env[69796]: INFO nova.scheduler.client.report [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Deleted allocations for instance f566b89a-cb28-4a80-898e-bb44f1301b30 [ 1018.458984] env[69796]: DEBUG oslo_vmware.api [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234470, 'name': ReconfigVM_Task, 'duration_secs': 0.130145} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.460098] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837832', 'volume_id': '2327c06a-8e20-4ca7-b545-6cbea3a21e0b', 'name': 'volume-2327c06a-8e20-4ca7-b545-6cbea3a21e0b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '78da661c-9020-40d1-b2e7-bc844c0bdbb0', 'attached_at': '', 'detached_at': '', 'volume_id': '2327c06a-8e20-4ca7-b545-6cbea3a21e0b', 'serial': '2327c06a-8e20-4ca7-b545-6cbea3a21e0b'} {{(pid=69796) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1018.644953] env[69796]: DEBUG oslo_concurrency.lockutils [None req-6de35758-bf12-4b0b-8dba-2e175e5f0b3d tempest-ServersNegativeTestMultiTenantJSON-1492792187 tempest-ServersNegativeTestMultiTenantJSON-1492792187-project-member] Lock "f566b89a-cb28-4a80-898e-bb44f1301b30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 6.519s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.004054] env[69796]: DEBUG nova.objects.instance [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lazy-loading 'flavor' on Instance uuid 78da661c-9020-40d1-b2e7-bc844c0bdbb0 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1020.014325] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a03ca89d-939d-4852-8af4-9c1c65c368fe tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.229s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.098381] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.101115] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.101115] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.101115] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.101115] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.103993] env[69796]: INFO nova.compute.manager [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Terminating instance [ 1021.610901] env[69796]: DEBUG nova.compute.manager [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Start destroying the instance on the hypervisor. {{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1021.611068] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1021.612481] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cace0f0d-7c29-400a-8ade-6692dca178f4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.622256] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1021.622532] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2211f29c-7ba2-4262-baa9-d3f66ae69cd5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.629818] env[69796]: DEBUG oslo_vmware.api [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1021.629818] env[69796]: value = "task-4234471" [ 1021.629818] env[69796]: _type = "Task" [ 1021.629818] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.638792] env[69796]: DEBUG oslo_vmware.api [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234471, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.141029] env[69796]: DEBUG oslo_vmware.api [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234471, 'name': PowerOffVM_Task, 'duration_secs': 0.193551} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.141029] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1022.141348] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1022.141387] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b75a2885-919f-4c8f-a7db-e22aece1fa4f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.204490] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1022.204878] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1022.205137] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Deleting the datastore file [datastore2] 78da661c-9020-40d1-b2e7-bc844c0bdbb0 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1022.205431] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c904652a-f8bf-4b68-8002-97ef0c54a052 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.212610] env[69796]: DEBUG oslo_vmware.api [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1022.212610] env[69796]: value = "task-4234473" [ 
1022.212610] env[69796]: _type = "Task" [ 1022.212610] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.220916] env[69796]: DEBUG oslo_vmware.api [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234473, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.724346] env[69796]: DEBUG oslo_vmware.api [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234473, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134381} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.724482] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.724591] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1022.724760] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1022.724937] env[69796]: INFO nova.compute.manager [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1022.725273] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1022.725488] env[69796]: DEBUG nova.compute.manager [-] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1022.725588] env[69796]: DEBUG nova.network.neutron [-] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1023.271489] env[69796]: DEBUG nova.compute.manager [req-4036b26a-2d28-451d-ab79-c6253e9991cb req-33264e16-fed9-4dc8-84ad-efca66dc9dae service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Received event network-vif-deleted-84c5a39e-6ab2-4353-8648-eb3fc939be20 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1023.271812] env[69796]: INFO nova.compute.manager [req-4036b26a-2d28-451d-ab79-c6253e9991cb req-33264e16-fed9-4dc8-84ad-efca66dc9dae service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Neutron deleted interface 84c5a39e-6ab2-4353-8648-eb3fc939be20; detaching it from the instance and deleting it from the info cache [ 1023.271988] env[69796]: DEBUG nova.network.neutron [req-4036b26a-2d28-451d-ab79-c6253e9991cb req-33264e16-fed9-4dc8-84ad-efca66dc9dae service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.752240] env[69796]: DEBUG nova.network.neutron [-] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.775035] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-497bc860-777c-4559-9b38-c52372b27c32 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.785185] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950c4717-1014-402d-8fa7-d97f71d2c2e3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.809779] env[69796]: DEBUG nova.compute.manager [req-4036b26a-2d28-451d-ab79-c6253e9991cb req-33264e16-fed9-4dc8-84ad-efca66dc9dae service nova] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Detach interface failed, port_id=84c5a39e-6ab2-4353-8648-eb3fc939be20, reason: Instance 78da661c-9020-40d1-b2e7-bc844c0bdbb0 could not be found. {{(pid=69796) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1024.254822] env[69796]: INFO nova.compute.manager [-] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Took 1.53 seconds to deallocate network for instance. 
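Aside (not part of the captured log): the PowerOffVM_Task and DeleteDatastoreFile_Task entries above follow oslo.vmware's usual invoke-then-poll pattern — a task-returning vSphere method is invoked through the session, and wait_for_task polls it until it reports completion. Below is a minimal, illustrative sketch of that pattern only; the function name and parameters are placeholders for this example, not code taken from Nova.

    from oslo_vmware import api

    def power_off_vm(session: api.VMwareAPISession, vm_ref) -> None:
        """Power off a VM and wait for the vCenter task to finish.

        ``session`` is an established api.VMwareAPISession (the log earlier in
        this run shows one being created by VMwareAPISession._create_session);
        ``vm_ref`` is the VM's managed object reference.  wait_for_task polls
        the task on the session's task_poll_interval: each poll corresponds to
        a "progress is N%" line, and the final poll produces the
        "completed successfully" line seen above.
        """
        # Invoke the task-returning vSphere method through the session's Vim
        # client, then block until the task completes.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

The same polling loop accounts for the repeated "Waiting for the task" / "progress is 0%" pairs throughout the rest of this trace (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task).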
[ 1024.762070] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.762416] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.762589] env[69796]: DEBUG nova.objects.instance [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lazy-loading 'resources' on Instance uuid 78da661c-9020-40d1-b2e7-bc844c0bdbb0 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1025.282924] env[69796]: DEBUG nova.scheduler.client.report [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1025.298448] env[69796]: DEBUG nova.scheduler.client.report [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1025.298683] env[69796]: DEBUG nova.compute.provider_tree [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1025.309875] env[69796]: DEBUG nova.scheduler.client.report [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1025.327722] 
env[69796]: DEBUG nova.scheduler.client.report [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1025.495942] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6928ebb0-13bc-4308-baaa-03efad13e0de {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.503334] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd1d17fb-989d-4799-ab29-729ccb2cf0d0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.535478] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c86b7e1-accf-412a-9bed-3bc6d12dc133 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.543484] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee35387-d2be-48e6-8ff7-7d8635ce7d77 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.556840] env[69796]: DEBUG nova.compute.provider_tree [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.059799] env[69796]: DEBUG nova.scheduler.client.report [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1026.564492] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.802s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.582772] env[69796]: INFO nova.scheduler.client.report [None req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Deleted allocations for instance 78da661c-9020-40d1-b2e7-bc844c0bdbb0 [ 1027.090397] env[69796]: DEBUG oslo_concurrency.lockutils [None 
req-0d79a1fc-25e4-4866-a1b7-e00ad0029ed6 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "78da661c-9020-40d1-b2e7-bc844c0bdbb0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.992s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.407905] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "7bb6fade-ece1-447e-8261-4b7f96c35479" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.408256] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.910799] env[69796]: DEBUG nova.compute.manager [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1030.349075] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.353826] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.354043] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.354269] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.354512] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1030.354667] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69796) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1030.437016] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.437583] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.440138] env[69796]: INFO nova.compute.claims [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1031.612430] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a39880d-b710-4b02-b792-5677d126c547 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.620663] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1f0ce9-f570-434a-8de5-35b016af9fdf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.650534] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27ef123-1074-49a3-bc15-d7f90c848b2c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.658302] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a343f57-4a91-4ce1-a8ee-e8bb126e1ebd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.672760] env[69796]: DEBUG nova.compute.provider_tree [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1032.176212] env[69796]: DEBUG nova.scheduler.client.report [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1032.353631] env[69796]: DEBUG 
oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager.update_available_resource {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.681192] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.243s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.681736] env[69796]: DEBUG nova.compute.manager [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Start building networks asynchronously for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1032.857226] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.857494] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.857670] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.857826] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69796) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1032.858761] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45abc13f-620d-480d-9007-28a19e86b6b3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.867163] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d482590a-dbf0-4b08-8949-09635f7dbc23 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.881799] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f56266-efa8-4c7a-9c98-0552a1fc80bb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.888812] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d0d0df-4e22-48a7-ad18-61719535872d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1032.918378] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180367MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69796) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1032.918529] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.918760] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.186537] env[69796]: DEBUG nova.compute.utils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1033.189532] env[69796]: DEBUG nova.compute.manager [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Allocating IP information in the background. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1033.189743] env[69796]: DEBUG nova.network.neutron [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1033.295741] env[69796]: DEBUG nova.policy [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0caee67119e94ddb972fe671958a4aef', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a013be517fea4fe59a57059de0fbeff7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 1033.582639] env[69796]: DEBUG nova.network.neutron [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Successfully created port: 671fbda7-d5f6-4ff5-b749-205cf559e4cb {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1033.692602] env[69796]: DEBUG nova.compute.manager [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Start building block device mappings 
for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1034.704090] env[69796]: DEBUG nova.compute.manager [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Start spawning the instance on the hypervisor. {{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1034.727539] env[69796]: DEBUG nova.virt.hardware [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1034.727779] env[69796]: DEBUG nova.virt.hardware [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1034.727937] env[69796]: DEBUG nova.virt.hardware [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1034.728148] env[69796]: DEBUG nova.virt.hardware [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1034.728345] env[69796]: DEBUG nova.virt.hardware [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1034.728516] env[69796]: DEBUG nova.virt.hardware [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1034.728731] env[69796]: DEBUG nova.virt.hardware [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1034.728895] env[69796]: DEBUG nova.virt.hardware [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1034.729080] env[69796]: DEBUG nova.virt.hardware [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1034.729251] env[69796]: DEBUG nova.virt.hardware [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1034.729427] env[69796]: DEBUG nova.virt.hardware [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1034.730308] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed88394-53b7-4412-a38d-24210470f1b7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.738880] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab466c2-6abd-4ac9-b7c2-1fed7919c64d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.989157] env[69796]: DEBUG nova.compute.manager [req-4ea7c615-22b2-4929-9bbf-389485a3692f req-cfe95c05-8e8f-40bb-b744-1dfaf3c9bcbb service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Received event network-vif-plugged-671fbda7-d5f6-4ff5-b749-205cf559e4cb {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1034.989386] env[69796]: DEBUG oslo_concurrency.lockutils [req-4ea7c615-22b2-4929-9bbf-389485a3692f req-cfe95c05-8e8f-40bb-b744-1dfaf3c9bcbb service nova] Acquiring lock "7bb6fade-ece1-447e-8261-4b7f96c35479-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.989629] env[69796]: DEBUG oslo_concurrency.lockutils [req-4ea7c615-22b2-4929-9bbf-389485a3692f req-cfe95c05-8e8f-40bb-b744-1dfaf3c9bcbb service nova] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.989811] env[69796]: DEBUG oslo_concurrency.lockutils [req-4ea7c615-22b2-4929-9bbf-389485a3692f req-cfe95c05-8e8f-40bb-b744-1dfaf3c9bcbb service nova] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.989974] env[69796]: DEBUG nova.compute.manager [req-4ea7c615-22b2-4929-9bbf-389485a3692f req-cfe95c05-8e8f-40bb-b744-1dfaf3c9bcbb service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] No waiting events found dispatching network-vif-plugged-671fbda7-d5f6-4ff5-b749-205cf559e4cb {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1034.990408] env[69796]: WARNING nova.compute.manager [req-4ea7c615-22b2-4929-9bbf-389485a3692f req-cfe95c05-8e8f-40bb-b744-1dfaf3c9bcbb service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Received unexpected event network-vif-plugged-671fbda7-d5f6-4ff5-b749-205cf559e4cb for instance with vm_state building and task_state spawning. [ 1035.041802] env[69796]: DEBUG nova.network.neutron [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Successfully updated port: 671fbda7-d5f6-4ff5-b749-205cf559e4cb {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1035.460969] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47f223c0-12b0-4eda-ab42-81fe8b95afac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.461348] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 38792225-b054-4c08-b3ec-51d46287b0f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.463020] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47005af8-11fe-498f-9b67-e0316faeeb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.463020] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 836605ee-50cb-48b0-ba2e-33db3832f8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.463020] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance a4a16667-cd00-4850-9389-0bd57c7efd74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.463020] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 7f37f6c9-adba-4292-9d47-c455f77e539f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.463197] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 8b103adc-9903-406f-8fd1-e193e00cde11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.463197] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.463197] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 3020e505-513b-4b29-996a-6e70a212f508 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.463197] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance f0d4f167-344a-4828-9f6e-8a62ed8e064d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.463328] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.463328] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.463328] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 983f57b3-3bfb-41ce-a924-d48c72d25c9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.463328] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 7bb6fade-ece1-447e-8261-4b7f96c35479 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1035.463511] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1035.463775] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3200MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1035.543959] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "refresh_cache-7bb6fade-ece1-447e-8261-4b7f96c35479" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.544159] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquired lock "refresh_cache-7bb6fade-ece1-447e-8261-4b7f96c35479" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.544363] env[69796]: DEBUG nova.network.neutron [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1035.626394] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e1a5f4-7933-4149-8148-d22c45e94812 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.635532] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df64af17-2f33-4116-a43c-6286d8f2965f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.666112] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63f1e08-90f2-43aa-9e49-c9fdd90f90a8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.674404] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7242485-cfb6-4ed7-b8d4-1f216215c856 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.689113] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.075863] env[69796]: DEBUG nova.network.neutron [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 
7bb6fade-ece1-447e-8261-4b7f96c35479] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1036.192165] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1036.203501] env[69796]: DEBUG nova.network.neutron [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Updating instance_info_cache with network_info: [{"id": "671fbda7-d5f6-4ff5-b749-205cf559e4cb", "address": "fa:16:3e:8b:bf:85", "network": {"id": "cf63b262-bc3f-4dd1-bcad-ba359092ff79", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1600225792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a013be517fea4fe59a57059de0fbeff7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap671fbda7-d5", "ovs_interfaceid": "671fbda7-d5f6-4ff5-b749-205cf559e4cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.697406] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69796) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1036.697621] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.779s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.706442] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Releasing lock "refresh_cache-7bb6fade-ece1-447e-8261-4b7f96c35479" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.706787] env[69796]: DEBUG nova.compute.manager [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb 
tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Instance network_info: |[{"id": "671fbda7-d5f6-4ff5-b749-205cf559e4cb", "address": "fa:16:3e:8b:bf:85", "network": {"id": "cf63b262-bc3f-4dd1-bcad-ba359092ff79", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1600225792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a013be517fea4fe59a57059de0fbeff7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap671fbda7-d5", "ovs_interfaceid": "671fbda7-d5f6-4ff5-b749-205cf559e4cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1036.707345] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:bf:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d62c1cf-f39a-4626-9552-f1e13c692636', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '671fbda7-d5f6-4ff5-b749-205cf559e4cb', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1036.714860] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1036.715091] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1036.715368] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15ea5deb-91ef-46f1-aa2c-0170f57ec0a1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.735865] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1036.735865] env[69796]: value = "task-4234474" [ 1036.735865] env[69796]: _type = "Task" [ 1036.735865] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.743898] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234474, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.019177] env[69796]: DEBUG nova.compute.manager [req-00731dc0-3782-4165-aa29-c0242f92bd9f req-f115bfc9-6d65-4cf3-9166-cfa0d97028c3 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Received event network-changed-671fbda7-d5f6-4ff5-b749-205cf559e4cb {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1037.019386] env[69796]: DEBUG nova.compute.manager [req-00731dc0-3782-4165-aa29-c0242f92bd9f req-f115bfc9-6d65-4cf3-9166-cfa0d97028c3 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Refreshing instance network info cache due to event network-changed-671fbda7-d5f6-4ff5-b749-205cf559e4cb. {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1037.019663] env[69796]: DEBUG oslo_concurrency.lockutils [req-00731dc0-3782-4165-aa29-c0242f92bd9f req-f115bfc9-6d65-4cf3-9166-cfa0d97028c3 service nova] Acquiring lock "refresh_cache-7bb6fade-ece1-447e-8261-4b7f96c35479" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.019817] env[69796]: DEBUG oslo_concurrency.lockutils [req-00731dc0-3782-4165-aa29-c0242f92bd9f req-f115bfc9-6d65-4cf3-9166-cfa0d97028c3 service nova] Acquired lock "refresh_cache-7bb6fade-ece1-447e-8261-4b7f96c35479" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.019994] env[69796]: DEBUG nova.network.neutron [req-00731dc0-3782-4165-aa29-c0242f92bd9f req-f115bfc9-6d65-4cf3-9166-cfa0d97028c3 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Refreshing network info cache for port 671fbda7-d5f6-4ff5-b749-205cf559e4cb {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1037.246273] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234474, 'name': CreateVM_Task, 'duration_secs': 0.29481} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.246624] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1037.247132] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.247334] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.247671] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1037.247936] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a9d57c8-7539-429f-bd0f-044ea548d01c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.253653] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1037.253653] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52ef0b8d-8a97-e8df-c4ca-0515940b6b0f" [ 1037.253653] env[69796]: _type = "Task" [ 1037.253653] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.261764] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52ef0b8d-8a97-e8df-c4ca-0515940b6b0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.698295] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1037.701079] env[69796]: DEBUG nova.network.neutron [req-00731dc0-3782-4165-aa29-c0242f92bd9f req-f115bfc9-6d65-4cf3-9166-cfa0d97028c3 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Updated VIF entry in instance network info cache for port 671fbda7-d5f6-4ff5-b749-205cf559e4cb. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1037.701413] env[69796]: DEBUG nova.network.neutron [req-00731dc0-3782-4165-aa29-c0242f92bd9f req-f115bfc9-6d65-4cf3-9166-cfa0d97028c3 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Updating instance_info_cache with network_info: [{"id": "671fbda7-d5f6-4ff5-b749-205cf559e4cb", "address": "fa:16:3e:8b:bf:85", "network": {"id": "cf63b262-bc3f-4dd1-bcad-ba359092ff79", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1600225792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a013be517fea4fe59a57059de0fbeff7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap671fbda7-d5", "ovs_interfaceid": "671fbda7-d5f6-4ff5-b749-205cf559e4cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.764737] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52ef0b8d-8a97-e8df-c4ca-0515940b6b0f, 'name': SearchDatastore_Task, 'duration_secs': 0.011011} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.765048] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.765311] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1037.765547] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.765698] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1037.765886] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1037.766147] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be0047cb-aea9-4d27-bccf-30ad60b70b33 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.774191] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1037.774405] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1037.775104] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46d40039-2365-408a-a4c8-8011c4c77bf9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.780834] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1037.780834] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52976ecd-3424-6026-69e1-96d387c7ee58" [ 1037.780834] env[69796]: _type = "Task" [ 1037.780834] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.789808] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52976ecd-3424-6026-69e1-96d387c7ee58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.205056] env[69796]: DEBUG oslo_concurrency.lockutils [req-00731dc0-3782-4165-aa29-c0242f92bd9f req-f115bfc9-6d65-4cf3-9166-cfa0d97028c3 service nova] Releasing lock "refresh_cache-7bb6fade-ece1-447e-8261-4b7f96c35479" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.205476] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1038.205659] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1038.291722] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52976ecd-3424-6026-69e1-96d387c7ee58, 'name': SearchDatastore_Task, 'duration_secs': 0.009377} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.292532] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59e700aa-1c59-40cd-8c8b-e3e9bdbe9522 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.297517] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1038.297517] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]523557c6-b640-aa88-33b0-b2ae3fa2828c" [ 1038.297517] env[69796]: _type = "Task" [ 1038.297517] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.304932] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]523557c6-b640-aa88-33b0-b2ae3fa2828c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.807939] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]523557c6-b640-aa88-33b0-b2ae3fa2828c, 'name': SearchDatastore_Task, 'duration_secs': 0.009463} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.808210] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1038.808482] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 7bb6fade-ece1-447e-8261-4b7f96c35479/7bb6fade-ece1-447e-8261-4b7f96c35479.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1038.808756] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cffe8ac-604f-4e05-918c-cdb7425acd38 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.816058] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1038.816058] env[69796]: value = "task-4234475" [ 1038.816058] env[69796]: _type = "Task" [ 1038.816058] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.824169] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234475, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.325829] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234475, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.451672} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.326278] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 7bb6fade-ece1-447e-8261-4b7f96c35479/7bb6fade-ece1-447e-8261-4b7f96c35479.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1039.326396] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1039.326555] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b66bd79-9274-4aef-9f6f-4c6a02b15ef7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.333680] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1039.333680] env[69796]: value = "task-4234476" [ 1039.333680] env[69796]: _type = "Task" [ 1039.333680] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.342204] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234476, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.847434] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234476, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060127} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.847757] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1039.848711] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a33156-58e0-4ae5-9f67-a037932a6fca {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.872992] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 7bb6fade-ece1-447e-8261-4b7f96c35479/7bb6fade-ece1-447e-8261-4b7f96c35479.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1039.873278] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa18479d-d9b4-4916-8000-9142710560b4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.893717] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1039.893717] env[69796]: value = "task-4234477" [ 1039.893717] env[69796]: _type = "Task" [ 1039.893717] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.902622] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234477, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.404795] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234477, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.905120] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234477, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.405516] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234477, 'name': ReconfigVM_Task, 'duration_secs': 1.142392} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.405880] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 7bb6fade-ece1-447e-8261-4b7f96c35479/7bb6fade-ece1-447e-8261-4b7f96c35479.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.406454] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06d12b14-2935-4114-a06b-efa8e53e0206 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.413089] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1041.413089] env[69796]: value = "task-4234478" [ 1041.413089] env[69796]: _type = "Task" [ 1041.413089] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.421233] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234478, 'name': Rename_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.923535] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234478, 'name': Rename_Task, 'duration_secs': 0.135086} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.923885] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1041.924167] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7271411a-1bf3-4e44-b0de-7614339a7e35 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.931650] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1041.931650] env[69796]: value = "task-4234479" [ 1041.931650] env[69796]: _type = "Task" [ 1041.931650] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.939989] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234479, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.442531] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234479, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.942938] env[69796]: DEBUG oslo_vmware.api [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234479, 'name': PowerOnVM_Task, 'duration_secs': 0.997605} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.943238] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1042.943448] env[69796]: INFO nova.compute.manager [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Took 8.24 seconds to spawn the instance on the hypervisor. [ 1042.943657] env[69796]: DEBUG nova.compute.manager [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1042.944511] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f6d867c-5497-4ddf-85bc-b80936222781 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.463083] env[69796]: INFO nova.compute.manager [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Took 13.05 seconds to build instance. 
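The spawn sequence recorded above (SearchDatastore_Task, CopyVirtualDisk_Task of the cached image, ExtendVirtualDisk_Task growing the root disk to 1048576 KB, i.e. 1 GiB, ReconfigVM_Task to attach it, Rename_Task, PowerOnVM_Task) is driven by the same oslo.vmware call-and-wait pattern each time: the SOAP invocation returns a Task moref, wait_for_task (api.py:397) starts waiting on it, and _poll_task logs "progress is N%" on each poll (api.py:434) until completion is logged (api.py:444). A minimal sketch of that pattern follows; the host, credentials and vm_ref are placeholders rather than values from this run, and the constructor arguments reflect the oslo.vmware API as best recalled, not anything shown in the log.

    # Sketch of the call-and-wait pattern behind the "Invoking <X>_Task ...",
    # "Waiting for the task ..." and "completed successfully" entries above.
    from oslo_vmware import api as vmware_api


    def power_on(session, vm_ref):
        # invoke_api() issues the SOAP request (here PowerOnVM_Task) and returns
        # a Task managed-object reference without blocking.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task until vCenter reports success; each poll
        # corresponds to one "progress is N%" line in the trace.
        return session.wait_for_task(task)


    if __name__ == '__main__':
        # Hypothetical connection values; task_poll_interval sets the poll cadence.
        session = vmware_api.VMwareAPISession(
            'vcenter.example.test', 'administrator', 'secret',
            api_retry_count=10, task_poll_interval=0.5)
        # power_on(session, vm_ref) would then be called with a VirtualMachine
        # moref obtained from a prior lookup (omitted here).

Each "Task: {...} progress is N%" entry corresponds to one iteration of that polling loop, and the "duration_secs" field on the completion entry is how long the wait took.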
[ 1043.964956] env[69796]: DEBUG oslo_concurrency.lockutils [None req-9708a9ac-0dea-4545-98ea-d2e45cd39ebb tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.557s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.612950] env[69796]: DEBUG nova.compute.manager [req-70ad1375-80e2-4372-8599-559e03a62423 req-a6377e33-a347-43b5-971c-9bd20d5d8252 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Received event network-changed-671fbda7-d5f6-4ff5-b749-205cf559e4cb {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1044.612950] env[69796]: DEBUG nova.compute.manager [req-70ad1375-80e2-4372-8599-559e03a62423 req-a6377e33-a347-43b5-971c-9bd20d5d8252 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Refreshing instance network info cache due to event network-changed-671fbda7-d5f6-4ff5-b749-205cf559e4cb. {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1044.613303] env[69796]: DEBUG oslo_concurrency.lockutils [req-70ad1375-80e2-4372-8599-559e03a62423 req-a6377e33-a347-43b5-971c-9bd20d5d8252 service nova] Acquiring lock "refresh_cache-7bb6fade-ece1-447e-8261-4b7f96c35479" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.613303] env[69796]: DEBUG oslo_concurrency.lockutils [req-70ad1375-80e2-4372-8599-559e03a62423 req-a6377e33-a347-43b5-971c-9bd20d5d8252 service nova] Acquired lock "refresh_cache-7bb6fade-ece1-447e-8261-4b7f96c35479" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.613428] env[69796]: DEBUG nova.network.neutron [req-70ad1375-80e2-4372-8599-559e03a62423 req-a6377e33-a347-43b5-971c-9bd20d5d8252 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Refreshing network info cache for port 671fbda7-d5f6-4ff5-b749-205cf559e4cb {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1045.320749] env[69796]: DEBUG nova.network.neutron [req-70ad1375-80e2-4372-8599-559e03a62423 req-a6377e33-a347-43b5-971c-9bd20d5d8252 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Updated VIF entry in instance network info cache for port 671fbda7-d5f6-4ff5-b749-205cf559e4cb. 
{{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1045.321127] env[69796]: DEBUG nova.network.neutron [req-70ad1375-80e2-4372-8599-559e03a62423 req-a6377e33-a347-43b5-971c-9bd20d5d8252 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Updating instance_info_cache with network_info: [{"id": "671fbda7-d5f6-4ff5-b749-205cf559e4cb", "address": "fa:16:3e:8b:bf:85", "network": {"id": "cf63b262-bc3f-4dd1-bcad-ba359092ff79", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1600225792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a013be517fea4fe59a57059de0fbeff7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap671fbda7-d5", "ovs_interfaceid": "671fbda7-d5f6-4ff5-b749-205cf559e4cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.824028] env[69796]: DEBUG oslo_concurrency.lockutils [req-70ad1375-80e2-4372-8599-559e03a62423 req-a6377e33-a347-43b5-971c-9bd20d5d8252 service nova] Releasing lock "refresh_cache-7bb6fade-ece1-447e-8261-4b7f96c35479" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.890489] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "7bb6fade-ece1-447e-8261-4b7f96c35479" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.890785] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.394178] env[69796]: DEBUG nova.compute.utils [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1082.897597] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.959478] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "7bb6fade-ece1-447e-8261-4b7f96c35479" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.959893] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.960022] env[69796]: INFO nova.compute.manager [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Attaching volume e5c776ba-ad12-48c3-a3b2-49f9470ecd62 to /dev/sdb [ 1083.991573] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18fa190-3861-4e9a-9d18-8198ce1cf366 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.999461] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa7442d-bef2-4760-b0f6-854c3dcdd6c8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.013051] env[69796]: DEBUG nova.virt.block_device [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Updating existing volume attachment record: af25dff8-68a7-477a-8e8c-fbae14bb4e21 {{(pid=69796) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1088.562808] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Volume attach. 
Driver type: vmdk {{(pid=69796) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1088.563084] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837834', 'volume_id': 'e5c776ba-ad12-48c3-a3b2-49f9470ecd62', 'name': 'volume-e5c776ba-ad12-48c3-a3b2-49f9470ecd62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7bb6fade-ece1-447e-8261-4b7f96c35479', 'attached_at': '', 'detached_at': '', 'volume_id': 'e5c776ba-ad12-48c3-a3b2-49f9470ecd62', 'serial': 'e5c776ba-ad12-48c3-a3b2-49f9470ecd62'} {{(pid=69796) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1088.564019] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfdeb1d-1e03-4c14-bb72-30a9b8a8bdcb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.581087] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521bbbd2-584c-4a18-8cb3-9972a112600a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.608354] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Reconfiguring VM instance instance-0000005f to attach disk [localhost-esx-install-datastore] volume-e5c776ba-ad12-48c3-a3b2-49f9470ecd62/volume-e5c776ba-ad12-48c3-a3b2-49f9470ecd62.vmdk or device None with type thin {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1088.608464] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30cd0196-035f-4c24-9aa6-84e431d33d40 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.627941] env[69796]: DEBUG oslo_vmware.api [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1088.627941] env[69796]: value = "task-4234482" [ 1088.627941] env[69796]: _type = "Task" [ 1088.627941] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.636455] env[69796]: DEBUG oslo_vmware.api [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234482, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.138652] env[69796]: DEBUG oslo_vmware.api [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234482, 'name': ReconfigVM_Task, 'duration_secs': 0.2086} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.138962] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Reconfigured VM instance instance-0000005f to attach disk [localhost-esx-install-datastore] volume-e5c776ba-ad12-48c3-a3b2-49f9470ecd62/volume-e5c776ba-ad12-48c3-a3b2-49f9470ecd62.vmdk or device None with type thin {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1089.143636] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39e65718-2727-4b16-9f34-32a37165cbf7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.159662] env[69796]: DEBUG oslo_vmware.api [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1089.159662] env[69796]: value = "task-4234483" [ 1089.159662] env[69796]: _type = "Task" [ 1089.159662] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.168043] env[69796]: DEBUG oslo_vmware.api [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234483, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.669907] env[69796]: DEBUG oslo_vmware.api [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234483, 'name': ReconfigVM_Task, 'duration_secs': 0.147056} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.670287] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837834', 'volume_id': 'e5c776ba-ad12-48c3-a3b2-49f9470ecd62', 'name': 'volume-e5c776ba-ad12-48c3-a3b2-49f9470ecd62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7bb6fade-ece1-447e-8261-4b7f96c35479', 'attached_at': '', 'detached_at': '', 'volume_id': 'e5c776ba-ad12-48c3-a3b2-49f9470ecd62', 'serial': 'e5c776ba-ad12-48c3-a3b2-49f9470ecd62'} {{(pid=69796) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1090.353614] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.353829] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.353980] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69796) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1090.705236] env[69796]: DEBUG nova.objects.instance [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lazy-loading 'flavor' on Instance uuid 7bb6fade-ece1-447e-8261-4b7f96c35479 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.211317] env[69796]: DEBUG oslo_concurrency.lockutils [None req-5668e9cf-b9b5-4b82-b0d3-124ae7d29da2 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.251s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.382331] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "7bb6fade-ece1-447e-8261-4b7f96c35479" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.382586] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1091.885678] env[69796]: INFO nova.compute.manager [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Detaching volume e5c776ba-ad12-48c3-a3b2-49f9470ecd62 [ 1091.916491] env[69796]: INFO nova.virt.block_device [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Attempting to driver detach volume e5c776ba-ad12-48c3-a3b2-49f9470ecd62 from mountpoint /dev/sdb [ 1091.916738] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Volume detach. Driver type: vmdk {{(pid=69796) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1091.916931] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837834', 'volume_id': 'e5c776ba-ad12-48c3-a3b2-49f9470ecd62', 'name': 'volume-e5c776ba-ad12-48c3-a3b2-49f9470ecd62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7bb6fade-ece1-447e-8261-4b7f96c35479', 'attached_at': '', 'detached_at': '', 'volume_id': 'e5c776ba-ad12-48c3-a3b2-49f9470ecd62', 'serial': 'e5c776ba-ad12-48c3-a3b2-49f9470ecd62'} {{(pid=69796) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1091.917829] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15220071-c879-44e6-96a1-3d9e73eecf0b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.940694] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8369958-91a9-41a1-a3d1-1ffd9a61859d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.948746] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4eae3f8-b850-45c7-8b0d-f9a5e252453a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.969314] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1629d26e-adcd-4d44-a576-38246329010a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.983956] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] The volume has not been displaced from its original location: [localhost-esx-install-datastore] volume-e5c776ba-ad12-48c3-a3b2-49f9470ecd62/volume-e5c776ba-ad12-48c3-a3b2-49f9470ecd62.vmdk. No consolidation needed. 
{{(pid=69796) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1091.989189] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=69796) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1091.989483] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9728d854-b1e5-4e81-ae8c-56db3b8901fd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.007323] env[69796]: DEBUG oslo_vmware.api [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1092.007323] env[69796]: value = "task-4234484" [ 1092.007323] env[69796]: _type = "Task" [ 1092.007323] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.015096] env[69796]: DEBUG oslo_vmware.api [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234484, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.349025] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.353740] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.353966] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.354132] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager.update_available_resource {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1092.516868] env[69796]: DEBUG oslo_vmware.api [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234484, 'name': ReconfigVM_Task, 'duration_secs': 0.209678} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.517170] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=69796) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1092.521805] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-620f5c1c-0b9f-4ecb-83b7-da3fdcbfd5f5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.537389] env[69796]: DEBUG oslo_vmware.api [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1092.537389] env[69796]: value = "task-4234485" [ 1092.537389] env[69796]: _type = "Task" [ 1092.537389] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.545233] env[69796]: DEBUG oslo_vmware.api [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234485, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.857491] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.857798] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.857986] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.858167] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69796) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1092.859063] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f31db552-f990-4cf1-af14-d8f32db9771f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.867166] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb48c709-8820-4b55-b305-209f5de124a4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.881461] 
env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dbf57f7-ca90-4ced-8c54-14880e0b239b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.888033] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8425c560-d88a-41a5-8ee3-88620013dda3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.919286] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180716MB free_disk=0GB free_vcpus=48 pci_devices=None {{(pid=69796) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1092.919462] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.919653] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.046977] env[69796]: DEBUG oslo_vmware.api [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234485, 'name': ReconfigVM_Task, 'duration_secs': 0.140754} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.047232] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-837834', 'volume_id': 'e5c776ba-ad12-48c3-a3b2-49f9470ecd62', 'name': 'volume-e5c776ba-ad12-48c3-a3b2-49f9470ecd62', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7bb6fade-ece1-447e-8261-4b7f96c35479', 'attached_at': '', 'detached_at': '', 'volume_id': 'e5c776ba-ad12-48c3-a3b2-49f9470ecd62', 'serial': 'e5c776ba-ad12-48c3-a3b2-49f9470ecd62'} {{(pid=69796) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1093.598588] env[69796]: DEBUG nova.objects.instance [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lazy-loading 'flavor' on Instance uuid 7bb6fade-ece1-447e-8261-4b7f96c35479 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.952743] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47f223c0-12b0-4eda-ab42-81fe8b95afac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.953018] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 38792225-b054-4c08-b3ec-51d46287b0f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.953058] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47005af8-11fe-498f-9b67-e0316faeeb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.953157] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 836605ee-50cb-48b0-ba2e-33db3832f8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.953272] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance a4a16667-cd00-4850-9389-0bd57c7efd74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.953387] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 7f37f6c9-adba-4292-9d47-c455f77e539f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.953501] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 8b103adc-9903-406f-8fd1-e193e00cde11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.953616] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.953730] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 3020e505-513b-4b29-996a-6e70a212f508 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.953846] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance f0d4f167-344a-4828-9f6e-8a62ed8e064d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.953979] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.954115] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.954230] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 983f57b3-3bfb-41ce-a924-d48c72d25c9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.954375] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 7bb6fade-ece1-447e-8261-4b7f96c35479 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1093.954586] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1093.954770] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3200MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1094.106498] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c442a66a-d48c-4294-aa74-38f5971a282c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.114835] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7443ad-c878-40d5-966e-0e2d75ba2b5f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.145790] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0a9c7e-1fad-4474-8a39-e89592b79e78 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.153773] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d672a8ff-90ab-45f2-939b-68d3346febec {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.901878] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b8e3f0c8-1be3-4903-88cb-42f9e8157b9c tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.519s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.903170] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1095.423193] env[69796]: ERROR nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [req-3ea31caa-098a-491e-acc5-1b85a1e94497] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 
dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-3ea31caa-098a-491e-acc5-1b85a1e94497"}]} [ 1095.423585] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.504s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.424068] env[69796]: ERROR nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Error updating resources for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1095.424068] env[69796]: ERROR nova.compute.manager Traceback (most recent call last): [ 1095.424068] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1095.424068] env[69796]: ERROR nova.compute.manager yield [ 1095.424068] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1095.424068] env[69796]: ERROR nova.compute.manager self.set_inventory_for_provider( [ 1095.424068] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1095.424068] env[69796]: ERROR nova.compute.manager raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1095.424068] env[69796]: ERROR nova.compute.manager nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-3ea31caa-098a-491e-acc5-1b85a1e94497"}]} [ 1095.424068] env[69796]: ERROR nova.compute.manager [ 1095.424068] env[69796]: ERROR nova.compute.manager During handling of the above exception, another exception occurred: [ 1095.424068] env[69796]: ERROR nova.compute.manager [ 1095.424451] env[69796]: ERROR nova.compute.manager Traceback (most recent call last): [ 1095.424451] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 11220, in _update_available_resource_for_node [ 1095.424451] env[69796]: ERROR nova.compute.manager self.rt.update_available_resource(context, nodename, [ 1095.424451] env[69796]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 965, in update_available_resource [ 1095.424451] env[69796]: ERROR nova.compute.manager self._update_available_resource(context, resources, startup=startup) [ 1095.424451] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1095.424451] env[69796]: ERROR nova.compute.manager return f(*args, **kwargs) [ 1095.424451] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1096, in _update_available_resource [ 1095.424451] env[69796]: ERROR nova.compute.manager self._update(context, cn, startup=startup) [ 1095.424451] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1095.424451] env[69796]: ERROR nova.compute.manager self._update_to_placement(context, compute_node, startup) [ 1095.424451] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1095.424451] env[69796]: ERROR nova.compute.manager return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1095.424451] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1095.424451] env[69796]: ERROR nova.compute.manager return attempt.get(self._wrap_exception) [ 1095.424451] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1095.424451] env[69796]: ERROR nova.compute.manager six.reraise(self.value[0], self.value[1], self.value[2]) [ 1095.424451] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1095.424927] env[69796]: ERROR nova.compute.manager raise value [ 1095.424927] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1095.424927] env[69796]: ERROR nova.compute.manager attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1095.424927] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1095.424927] env[69796]: ERROR nova.compute.manager self.reportclient.update_from_provider_tree( [ 1095.424927] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1095.424927] env[69796]: ERROR nova.compute.manager with catch_all(pd.uuid): [ 1095.424927] env[69796]: ERROR nova.compute.manager File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1095.424927] env[69796]: ERROR nova.compute.manager self.gen.throw(typ, value, traceback) [ 1095.424927] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1095.424927] env[69796]: ERROR nova.compute.manager raise exception.ResourceProviderSyncFailed() [ 1095.424927] env[69796]: ERROR nova.compute.manager nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 1095.424927] env[69796]: ERROR nova.compute.manager [ 1095.620197] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "7bb6fade-ece1-447e-8261-4b7f96c35479" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.620464] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.620681] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "7bb6fade-ece1-447e-8261-4b7f96c35479-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.620866] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.621053] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.623258] env[69796]: INFO nova.compute.manager [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Terminating instance [ 1096.127629] env[69796]: DEBUG nova.compute.manager [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Start destroying the instance on the hypervisor. 
{{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1096.127939] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1096.128837] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a354ef1-d571-4c56-8d7c-be48104bfa19 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.136900] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1096.137159] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d9955be-22b0-45bb-a529-220c7de0e45f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.143568] env[69796]: DEBUG oslo_vmware.api [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1096.143568] env[69796]: value = "task-4234486" [ 1096.143568] env[69796]: _type = "Task" [ 1096.143568] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.152062] env[69796]: DEBUG oslo_vmware.api [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234486, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.654419] env[69796]: DEBUG oslo_vmware.api [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234486, 'name': PowerOffVM_Task, 'duration_secs': 0.184705} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.654795] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1096.654892] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1096.655160] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5304abe2-20e8-4f9e-8a9f-b5e881ad89b2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.716926] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1096.717155] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1096.717345] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Deleting the datastore file [datastore2] 7bb6fade-ece1-447e-8261-4b7f96c35479 {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1096.717621] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a7b2736-d686-4334-8370-457361ebccaf {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.725269] env[69796]: DEBUG oslo_vmware.api [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for the task: (returnval){ [ 1096.725269] env[69796]: value = "task-4234488" [ 1096.725269] env[69796]: _type = "Task" [ 1096.725269] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.733233] env[69796]: DEBUG oslo_vmware.api [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234488, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.235473] env[69796]: DEBUG oslo_vmware.api [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Task: {'id': task-4234488, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137336} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.235742] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1097.235921] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1097.236111] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1097.236287] env[69796]: INFO nova.compute.manager [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1097.236526] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1097.236720] env[69796]: DEBUG nova.compute.manager [-] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1097.236816] env[69796]: DEBUG nova.network.neutron [-] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1097.681617] env[69796]: DEBUG nova.compute.manager [req-0fa5e9b0-199a-4306-b34c-0cce7f89878b req-caab8a30-5f80-4619-8e21-90c8d3d40b40 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Received event network-vif-deleted-671fbda7-d5f6-4ff5-b749-205cf559e4cb {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1097.681910] env[69796]: INFO nova.compute.manager [req-0fa5e9b0-199a-4306-b34c-0cce7f89878b req-caab8a30-5f80-4619-8e21-90c8d3d40b40 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Neutron deleted interface 671fbda7-d5f6-4ff5-b749-205cf559e4cb; detaching it from the instance and deleting it from the info cache [ 1097.681910] env[69796]: DEBUG nova.network.neutron [req-0fa5e9b0-199a-4306-b34c-0cce7f89878b req-caab8a30-5f80-4619-8e21-90c8d3d40b40 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.160689] env[69796]: DEBUG nova.network.neutron [-] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.184177] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c65d95a1-2e72-4580-b64d-c89e2ad38945 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.195768] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587ea1b7-68f6-4bc7-9079-86cbd51a1f81 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.219834] env[69796]: DEBUG nova.compute.manager [req-0fa5e9b0-199a-4306-b34c-0cce7f89878b req-caab8a30-5f80-4619-8e21-90c8d3d40b40 service nova] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Detach interface failed, port_id=671fbda7-d5f6-4ff5-b749-205cf559e4cb, reason: Instance 7bb6fade-ece1-447e-8261-4b7f96c35479 could not be found. 
{{(pid=69796) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1098.423815] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.424087] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.664118] env[69796]: INFO nova.compute.manager [-] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Took 1.43 seconds to deallocate network for instance. [ 1099.171064] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.171483] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.171591] env[69796]: DEBUG nova.objects.instance [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lazy-loading 'resources' on Instance uuid 7bb6fade-ece1-447e-8261-4b7f96c35479 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1099.693509] env[69796]: DEBUG nova.scheduler.client.report [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1099.709611] env[69796]: DEBUG nova.scheduler.client.report [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1099.709829] env[69796]: DEBUG nova.compute.provider_tree [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1099.722673] env[69796]: DEBUG nova.scheduler.client.report [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1099.743146] env[69796]: DEBUG nova.scheduler.client.report [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1099.932161] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d2292f-fb35-4339-a076-be10b69dc5ad {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.940736] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2369fc-b4a1-4c09-b75d-5a960e95a91c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.971223] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8987b8d-2a41-47c1-937f-a242da94380e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.979656] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6e79ee-2d3b-49f7-b41d-3bb798c8878c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.995256] env[69796]: DEBUG nova.compute.provider_tree [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.499382] env[69796]: DEBUG nova.scheduler.client.report [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1101.004773] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.833s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.023057] env[69796]: INFO nova.scheduler.client.report [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Deleted allocations for instance 7bb6fade-ece1-447e-8261-4b7f96c35479 [ 1101.530465] env[69796]: DEBUG oslo_concurrency.lockutils [None req-0919d89e-5d50-406e-a424-fd3089e9b658 tempest-AttachVolumeNegativeTest-1688662533 tempest-AttachVolumeNegativeTest-1688662533-project-member] Lock "7bb6fade-ece1-447e-8261-4b7f96c35479" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.910s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.354032] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1151.354359] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1151.354739] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69796) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1152.354774] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.349767] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.353321] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.353510] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager.update_available_resource {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.857201] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.857487] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.857662] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.857826] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69796) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1154.858829] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325da71a-df14-47d3-b7f3-bf27cd95d65e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.867425] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a61fc7d-0b6d-40c6-8409-05f29e05fa9f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.881312] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e409b8f6-48f5-454e-bcd4-3b576bf0c45e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.887494] env[69796]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8474b8-f709-44d1-84c0-bd80b3973656 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.917672] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180537MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69796) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1154.917799] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.917980] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.951169] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47f223c0-12b0-4eda-ab42-81fe8b95afac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1155.951493] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 38792225-b054-4c08-b3ec-51d46287b0f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1155.951493] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47005af8-11fe-498f-9b67-e0316faeeb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1155.951614] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 836605ee-50cb-48b0-ba2e-33db3832f8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1155.951677] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance a4a16667-cd00-4850-9389-0bd57c7efd74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1155.951791] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 7f37f6c9-adba-4292-9d47-c455f77e539f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1155.951909] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 8b103adc-9903-406f-8fd1-e193e00cde11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1155.952033] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1155.952154] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 3020e505-513b-4b29-996a-6e70a212f508 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1155.952265] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance f0d4f167-344a-4828-9f6e-8a62ed8e064d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1155.952374] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1155.952484] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1155.952594] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 983f57b3-3bfb-41ce-a924-d48c72d25c9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1155.952791] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1155.952926] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1156.100574] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94f504f-d0b6-4692-a6ac-fe4ea5a2223a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.108474] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b3c35b-07cd-4c66-86f9-ce1ad6940079 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.137298] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781d80ff-5feb-46af-b118-42f755fb0b0c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.144766] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196d2e41-1677-4fe6-8f0f-39b577b2457a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.159012] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.662595] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.167942] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69796) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1157.168361] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.250s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.168584] env[69796]: DEBUG oslo_service.periodic_task [None 
req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1159.169052] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1159.349928] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.336154] env[69796]: INFO nova.compute.manager [None req-fa5fd9e2-ff83-496d-a544-b9bedad89992 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Get console output [ 1176.336570] env[69796]: WARNING nova.virt.vmwareapi.driver [None req-fa5fd9e2-ff83-496d-a544-b9bedad89992 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] The console log is missing. Check your VSPC configuration [ 1178.419738] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "2941cb59-151e-4931-be3b-79e2830d793a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.420117] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "2941cb59-151e-4931-be3b-79e2830d793a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.923464] env[69796]: DEBUG nova.compute.manager [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1179.453582] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.453946] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.455373] env[69796]: INFO nova.compute.claims [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1180.633253] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e01ec88-9979-4bbb-b100-bba711c47f9d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.641462] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a3a937-8339-4ce7-91b0-bcc6f2342f47 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.672080] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3413dc55-f736-4426-8118-053a236fdf98 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.680267] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf38cb88-5864-4df5-9404-5bebb87d522b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.694018] env[69796]: DEBUG nova.compute.provider_tree [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.197738] env[69796]: DEBUG nova.scheduler.client.report [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1181.702586] env[69796]: DEBUG oslo_concurrency.lockutils [None 
req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.248s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.703153] env[69796]: DEBUG nova.compute.manager [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Start building networks asynchronously for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1182.208413] env[69796]: DEBUG nova.compute.utils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1182.210123] env[69796]: DEBUG nova.compute.manager [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Allocating IP information in the background. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1182.210327] env[69796]: DEBUG nova.network.neutron [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1182.260439] env[69796]: DEBUG nova.policy [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cdcf2d22c98b45ad92e219e24b285b44', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03701784af4041e29a23e885800ea39b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 1182.528291] env[69796]: DEBUG nova.network.neutron [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Successfully created port: 318463f1-256b-4de8-bcbd-55025e35fbb8 {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1182.715443] env[69796]: DEBUG nova.compute.manager [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Start building block device mappings for instance. 
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1183.726385] env[69796]: DEBUG nova.compute.manager [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Start spawning the instance on the hypervisor. {{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1183.750677] env[69796]: DEBUG nova.virt.hardware [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1183.750920] env[69796]: DEBUG nova.virt.hardware [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1183.751096] env[69796]: DEBUG nova.virt.hardware [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1183.751315] env[69796]: DEBUG nova.virt.hardware [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1183.751467] env[69796]: DEBUG nova.virt.hardware [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1183.751619] env[69796]: DEBUG nova.virt.hardware [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1183.751866] env[69796]: DEBUG nova.virt.hardware [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1183.752018] env[69796]: 
DEBUG nova.virt.hardware [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1183.752178] env[69796]: DEBUG nova.virt.hardware [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1183.752952] env[69796]: DEBUG nova.virt.hardware [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1183.752952] env[69796]: DEBUG nova.virt.hardware [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1183.753374] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85feeaca-05d7-4990-bee7-0dffd764a3fb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.761743] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97024392-f829-4556-8393-28363ba9ecba {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.877159] env[69796]: DEBUG nova.compute.manager [req-a0d21042-0735-4d5c-b1db-ebdeb20a9cb0 req-b18d0720-1c36-454a-97e8-4b6e89fbeca9 service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Received event network-vif-plugged-318463f1-256b-4de8-bcbd-55025e35fbb8 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1183.877159] env[69796]: DEBUG oslo_concurrency.lockutils [req-a0d21042-0735-4d5c-b1db-ebdeb20a9cb0 req-b18d0720-1c36-454a-97e8-4b6e89fbeca9 service nova] Acquiring lock "2941cb59-151e-4931-be3b-79e2830d793a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.877159] env[69796]: DEBUG oslo_concurrency.lockutils [req-a0d21042-0735-4d5c-b1db-ebdeb20a9cb0 req-b18d0720-1c36-454a-97e8-4b6e89fbeca9 service nova] Lock "2941cb59-151e-4931-be3b-79e2830d793a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.877159] env[69796]: DEBUG oslo_concurrency.lockutils [req-a0d21042-0735-4d5c-b1db-ebdeb20a9cb0 req-b18d0720-1c36-454a-97e8-4b6e89fbeca9 service nova] Lock "2941cb59-151e-4931-be3b-79e2830d793a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.877159] env[69796]: DEBUG nova.compute.manager [req-a0d21042-0735-4d5c-b1db-ebdeb20a9cb0 
req-b18d0720-1c36-454a-97e8-4b6e89fbeca9 service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] No waiting events found dispatching network-vif-plugged-318463f1-256b-4de8-bcbd-55025e35fbb8 {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1183.877940] env[69796]: WARNING nova.compute.manager [req-a0d21042-0735-4d5c-b1db-ebdeb20a9cb0 req-b18d0720-1c36-454a-97e8-4b6e89fbeca9 service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Received unexpected event network-vif-plugged-318463f1-256b-4de8-bcbd-55025e35fbb8 for instance with vm_state building and task_state spawning. [ 1183.956979] env[69796]: DEBUG nova.network.neutron [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Successfully updated port: 318463f1-256b-4de8-bcbd-55025e35fbb8 {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1184.460124] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "refresh_cache-2941cb59-151e-4931-be3b-79e2830d793a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.460337] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "refresh_cache-2941cb59-151e-4931-be3b-79e2830d793a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.460623] env[69796]: DEBUG nova.network.neutron [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1184.994450] env[69796]: DEBUG nova.network.neutron [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1185.114644] env[69796]: DEBUG nova.network.neutron [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Updating instance_info_cache with network_info: [{"id": "318463f1-256b-4de8-bcbd-55025e35fbb8", "address": "fa:16:3e:85:9e:61", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap318463f1-25", "ovs_interfaceid": "318463f1-256b-4de8-bcbd-55025e35fbb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1185.617635] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "refresh_cache-2941cb59-151e-4931-be3b-79e2830d793a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.618014] env[69796]: DEBUG nova.compute.manager [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Instance network_info: |[{"id": "318463f1-256b-4de8-bcbd-55025e35fbb8", "address": "fa:16:3e:85:9e:61", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap318463f1-25", "ovs_interfaceid": "318463f1-256b-4de8-bcbd-55025e35fbb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1185.618509] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:9e:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20e3f794-c7a3-4696-9488-ecf34c570ef9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '318463f1-256b-4de8-bcbd-55025e35fbb8', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1185.626187] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1185.626401] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1185.626624] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c37b2d2-814c-4c73-a646-a218397707f9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.647065] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1185.647065] env[69796]: value = "task-4234491" [ 1185.647065] env[69796]: _type = "Task" [ 1185.647065] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.655020] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234491, 'name': CreateVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.904973] env[69796]: DEBUG nova.compute.manager [req-20955988-50f4-4b0a-bdc5-74d204169b4f req-12301788-b741-4ce7-a6b0-d1a4c828ba1b service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Received event network-changed-318463f1-256b-4de8-bcbd-55025e35fbb8 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1185.905058] env[69796]: DEBUG nova.compute.manager [req-20955988-50f4-4b0a-bdc5-74d204169b4f req-12301788-b741-4ce7-a6b0-d1a4c828ba1b service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Refreshing instance network info cache due to event network-changed-318463f1-256b-4de8-bcbd-55025e35fbb8. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1185.905435] env[69796]: DEBUG oslo_concurrency.lockutils [req-20955988-50f4-4b0a-bdc5-74d204169b4f req-12301788-b741-4ce7-a6b0-d1a4c828ba1b service nova] Acquiring lock "refresh_cache-2941cb59-151e-4931-be3b-79e2830d793a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.905435] env[69796]: DEBUG oslo_concurrency.lockutils [req-20955988-50f4-4b0a-bdc5-74d204169b4f req-12301788-b741-4ce7-a6b0-d1a4c828ba1b service nova] Acquired lock "refresh_cache-2941cb59-151e-4931-be3b-79e2830d793a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.905638] env[69796]: DEBUG nova.network.neutron [req-20955988-50f4-4b0a-bdc5-74d204169b4f req-12301788-b741-4ce7-a6b0-d1a4c828ba1b service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Refreshing network info cache for port 318463f1-256b-4de8-bcbd-55025e35fbb8 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1186.158285] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234491, 'name': CreateVM_Task, 'duration_secs': 0.286886} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.158732] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1186.159146] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.159329] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.159677] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1186.159964] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14b306a0-a1c5-4b02-a223-03db393ed924 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.165404] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1186.165404] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52440234-8ced-a6b2-5a0a-6618d019521b" [ 1186.165404] env[69796]: _type = "Task" [ 1186.165404] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.174286] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52440234-8ced-a6b2-5a0a-6618d019521b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.596041] env[69796]: DEBUG nova.network.neutron [req-20955988-50f4-4b0a-bdc5-74d204169b4f req-12301788-b741-4ce7-a6b0-d1a4c828ba1b service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Updated VIF entry in instance network info cache for port 318463f1-256b-4de8-bcbd-55025e35fbb8. {{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1186.596487] env[69796]: DEBUG nova.network.neutron [req-20955988-50f4-4b0a-bdc5-74d204169b4f req-12301788-b741-4ce7-a6b0-d1a4c828ba1b service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Updating instance_info_cache with network_info: [{"id": "318463f1-256b-4de8-bcbd-55025e35fbb8", "address": "fa:16:3e:85:9e:61", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap318463f1-25", "ovs_interfaceid": "318463f1-256b-4de8-bcbd-55025e35fbb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.676764] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52440234-8ced-a6b2-5a0a-6618d019521b, 'name': SearchDatastore_Task, 'duration_secs': 0.011105} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.676992] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.677244] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1186.677481] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.677624] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.677805] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1186.678105] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6bebf94d-0ace-44f2-91d0-9634e1cd65f7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.687847] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1186.688093] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1186.688858] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbb1ae73-a1bb-4649-8060-fc64dc082160 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.695618] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1186.695618] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f9607b-86f9-e1e6-5933-62af7d9d99f2" [ 1186.695618] env[69796]: _type = "Task" [ 1186.695618] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.704742] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f9607b-86f9-e1e6-5933-62af7d9d99f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.100117] env[69796]: DEBUG oslo_concurrency.lockutils [req-20955988-50f4-4b0a-bdc5-74d204169b4f req-12301788-b741-4ce7-a6b0-d1a4c828ba1b service nova] Releasing lock "refresh_cache-2941cb59-151e-4931-be3b-79e2830d793a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.206528] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f9607b-86f9-e1e6-5933-62af7d9d99f2, 'name': SearchDatastore_Task, 'duration_secs': 0.010579} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.207349] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0495dfea-4078-4046-9c57-6b1a72ffbcf1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.213412] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1187.213412] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52b2dfd9-0e5e-623d-d7fb-04015d98ba70" [ 1187.213412] env[69796]: _type = "Task" [ 1187.213412] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.221645] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52b2dfd9-0e5e-623d-d7fb-04015d98ba70, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.723748] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52b2dfd9-0e5e-623d-d7fb-04015d98ba70, 'name': SearchDatastore_Task, 'duration_secs': 0.010852} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.723961] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.724240] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 2941cb59-151e-4931-be3b-79e2830d793a/2941cb59-151e-4931-be3b-79e2830d793a.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1187.724506] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c600dc63-9ef3-4c01-9252-2e9c482602e1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.731859] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1187.731859] env[69796]: value = "task-4234492" [ 1187.731859] env[69796]: _type = "Task" [ 1187.731859] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.739483] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234492, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.242110] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234492, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478311} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.242461] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 2941cb59-151e-4931-be3b-79e2830d793a/2941cb59-151e-4931-be3b-79e2830d793a.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1188.242646] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1188.242936] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc5d6db4-588c-494c-a64d-79ae49309aea {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.249539] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1188.249539] env[69796]: value = "task-4234493" [ 1188.249539] env[69796]: _type = "Task" [ 1188.249539] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.257295] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234493, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.759701] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234493, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062476} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.760017] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1188.760853] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b632930-5d8f-4505-97f5-6dfc04682ab2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.782693] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 2941cb59-151e-4931-be3b-79e2830d793a/2941cb59-151e-4931-be3b-79e2830d793a.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.782947] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff5036ca-6472-4969-b496-fd3230b768a1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.802269] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1188.802269] env[69796]: value = "task-4234494" [ 1188.802269] env[69796]: _type = "Task" [ 1188.802269] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.811380] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234494, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.312687] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234494, 'name': ReconfigVM_Task, 'duration_secs': 0.286212} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.313119] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 2941cb59-151e-4931-be3b-79e2830d793a/2941cb59-151e-4931-be3b-79e2830d793a.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.313631] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ecb2779f-e74f-46dc-b6a0-9dccb5f98c63 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.320508] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1189.320508] env[69796]: value = "task-4234495" [ 1189.320508] env[69796]: _type = "Task" [ 1189.320508] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.328987] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234495, 'name': Rename_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.831320] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234495, 'name': Rename_Task, 'duration_secs': 0.135268} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.831624] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1189.831872] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5177e45-5163-44c3-9db6-fa3416b50441 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.838705] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1189.838705] env[69796]: value = "task-4234496" [ 1189.838705] env[69796]: _type = "Task" [ 1189.838705] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.845894] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234496, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.349381] env[69796]: DEBUG oslo_vmware.api [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234496, 'name': PowerOnVM_Task, 'duration_secs': 0.439908} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.349890] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1190.349890] env[69796]: INFO nova.compute.manager [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Took 6.62 seconds to spawn the instance on the hypervisor. [ 1190.351389] env[69796]: DEBUG nova.compute.manager [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1190.351389] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf5cfde-df89-4647-8890-ba66358b0d02 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.869040] env[69796]: INFO nova.compute.manager [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Took 11.43 seconds to build instance. 
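
The spawn sequence above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) repeats one pattern: a vSphere task is started through the oslo.vmware session, then polled until it completes (the "progress is 0%" ... "completed successfully" pairs logged by wait_for_task/_poll_task). The sketch below is illustrative only, not Nova's actual code; it assumes `session` is an already-authenticated oslo_vmware.api.VMwareAPISession like the one created at driver startup, and that `vm_folder`, `config_spec` and `res_pool` are managed-object references/specs prepared by the caller (hypothetical names).

# Illustrative sketch only -- not Nova's code. Assumes `session` is an
# authenticated oslo_vmware.api.VMwareAPISession; vm_folder, config_spec and
# res_pool are vSphere managed-object refs / specs supplied by the caller.
from oslo_vmware import exceptions as vexc


def create_and_power_on(session, vm_folder, config_spec, res_pool):
    """Start a vSphere task, then poll it to completion, as in the log above."""
    # Folder.CreateVM_Task returns a Task moref; wait_for_task polls it and
    # returns the TaskInfo, whose result is the new VM reference.
    create_task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                                     config=config_spec, pool=res_pool)
    task_info = session.wait_for_task(create_task)
    vm_ref = task_info.result

    try:
        power_task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(power_task)
    except vexc.VimException:
        # A failed task surfaces as an exception from wait_for_task; the
        # caller decides whether to retry or put the instance into error.
        raise
    return vm_ref
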
[ 1191.371339] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d05df0b4-4ded-4bf1-9239-c30ef04eb91d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "2941cb59-151e-4931-be3b-79e2830d793a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.951s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.798462] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "2941cb59-151e-4931-be3b-79e2830d793a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.798792] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "2941cb59-151e-4931-be3b-79e2830d793a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.799128] env[69796]: INFO nova.compute.manager [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Shelving [ 1192.809290] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1192.809692] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a390ee1-df47-4602-9fe2-b28ae71c6bae {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.817287] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1192.817287] env[69796]: value = "task-4234497" [ 1192.817287] env[69796]: _type = "Task" [ 1192.817287] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.825943] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234497, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.327080] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234497, 'name': PowerOffVM_Task, 'duration_secs': 0.230422} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.327387] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1193.328199] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25149816-3ac0-4ebc-8fe1-3210a89f93c7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.347436] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017f71db-4b00-4785-9b98-eb094f90690b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.857987] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Creating Snapshot of the VM instance {{(pid=69796) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1193.858303] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c162fd65-fea8-4ac5-8913-12310f09909d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.866262] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1193.866262] env[69796]: value = "task-4234498" [ 1193.866262] env[69796]: _type = "Task" [ 1193.866262] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.874587] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234498, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.378720] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234498, 'name': CreateSnapshot_Task, 'duration_secs': 0.409445} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.379172] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Created Snapshot of the VM instance {{(pid=69796) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1194.380152] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1023ad4-ac3d-4947-808b-8b2e242f1cf3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.898977] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Creating linked-clone VM from snapshot {{(pid=69796) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1194.899381] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-afc68092-d641-41ee-ab75-8e4cdbb72fea {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.908349] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1194.908349] env[69796]: value = "task-4234499" [ 1194.908349] env[69796]: _type = "Task" [ 1194.908349] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.916988] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234499, 'name': CloneVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.418624] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234499, 'name': CloneVM_Task} progress is 94%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.919237] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234499, 'name': CloneVM_Task} progress is 100%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.420477] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234499, 'name': CloneVM_Task, 'duration_secs': 1.04148} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.420797] env[69796]: INFO nova.virt.vmwareapi.vmops [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Created linked-clone VM from snapshot [ 1196.421538] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315864fc-dfa7-4d6c-90c7-36554cfb3db7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.428689] env[69796]: DEBUG nova.virt.vmwareapi.images [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Uploading image f2923da1-7a1c-4d16-939a-4349045da32e {{(pid=69796) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1196.452067] env[69796]: DEBUG oslo_vmware.rw_handles [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1196.452067] env[69796]: value = "vm-837837" [ 1196.452067] env[69796]: _type = "VirtualMachine" [ 1196.452067] env[69796]: }. {{(pid=69796) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1196.452401] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0ebc9e6c-cf32-441d-8389-4180263468d8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.460180] env[69796]: DEBUG oslo_vmware.rw_handles [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lease: (returnval){ [ 1196.460180] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f83683-acc7-d2cb-3131-229f4deffd74" [ 1196.460180] env[69796]: _type = "HttpNfcLease" [ 1196.460180] env[69796]: } obtained for exporting VM: (result){ [ 1196.460180] env[69796]: value = "vm-837837" [ 1196.460180] env[69796]: _type = "VirtualMachine" [ 1196.460180] env[69796]: }. {{(pid=69796) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1196.460468] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the lease: (returnval){ [ 1196.460468] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f83683-acc7-d2cb-3131-229f4deffd74" [ 1196.460468] env[69796]: _type = "HttpNfcLease" [ 1196.460468] env[69796]: } to be ready. {{(pid=69796) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1196.467488] env[69796]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1196.467488] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f83683-acc7-d2cb-3131-229f4deffd74" [ 1196.467488] env[69796]: _type = "HttpNfcLease" [ 1196.467488] env[69796]: } is initializing. 
{{(pid=69796) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1196.969576] env[69796]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1196.969576] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f83683-acc7-d2cb-3131-229f4deffd74" [ 1196.969576] env[69796]: _type = "HttpNfcLease" [ 1196.969576] env[69796]: } is ready. {{(pid=69796) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1196.970097] env[69796]: DEBUG oslo_vmware.rw_handles [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1196.970097] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52f83683-acc7-d2cb-3131-229f4deffd74" [ 1196.970097] env[69796]: _type = "HttpNfcLease" [ 1196.970097] env[69796]: }. {{(pid=69796) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1196.970674] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfce993-9115-4acc-9109-04ab4fa1b3f7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.978871] env[69796]: DEBUG oslo_vmware.rw_handles [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52494014-c83c-62d1-e325-1ead85bb8696/disk-0.vmdk from lease info. {{(pid=69796) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1196.979083] env[69796]: DEBUG oslo_vmware.rw_handles [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52494014-c83c-62d1-e325-1ead85bb8696/disk-0.vmdk for reading. {{(pid=69796) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1197.067048] env[69796]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-01254a9b-704d-4c33-ab9c-6b3cb5279954 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.177855] env[69796]: DEBUG oslo_vmware.rw_handles [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52494014-c83c-62d1-e325-1ead85bb8696/disk-0.vmdk. {{(pid=69796) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1204.178947] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc0a566-48f9-4ee5-a54c-29f7941c27ef {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.185583] env[69796]: DEBUG oslo_vmware.rw_handles [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52494014-c83c-62d1-e325-1ead85bb8696/disk-0.vmdk is in state: ready. 
{{(pid=69796) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1204.185792] env[69796]: ERROR oslo_vmware.rw_handles [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52494014-c83c-62d1-e325-1ead85bb8696/disk-0.vmdk due to incomplete transfer. [ 1204.186064] env[69796]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d5fd8a7d-d0ea-4276-8214-1b8ac49420ec {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.193846] env[69796]: DEBUG oslo_vmware.rw_handles [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52494014-c83c-62d1-e325-1ead85bb8696/disk-0.vmdk. {{(pid=69796) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1204.194061] env[69796]: DEBUG nova.virt.vmwareapi.images [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Uploaded image f2923da1-7a1c-4d16-939a-4349045da32e to the Glance image server {{(pid=69796) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1204.196069] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Destroying the VM {{(pid=69796) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1204.196321] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8bb75143-7c73-4063-ac68-0cc19ebf5d76 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.202766] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1204.202766] env[69796]: value = "task-4234501" [ 1204.202766] env[69796]: _type = "Task" [ 1204.202766] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.211681] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234501, 'name': Destroy_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.713348] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234501, 'name': Destroy_Task, 'duration_secs': 0.345197} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.713597] env[69796]: INFO nova.virt.vmwareapi.vm_util [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Destroyed the VM [ 1204.713868] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Deleting Snapshot of the VM instance {{(pid=69796) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1204.714148] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ef2e0e8b-ea61-4431-80ef-197753b29c12 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.720550] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1204.720550] env[69796]: value = "task-4234502" [ 1204.720550] env[69796]: _type = "Task" [ 1204.720550] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.728601] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234502, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.231020] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234502, 'name': RemoveSnapshot_Task, 'duration_secs': 0.414477} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.231389] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Deleted Snapshot of the VM instance {{(pid=69796) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1205.231580] env[69796]: DEBUG nova.compute.manager [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1205.232371] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926fa485-d47a-4f7f-a27f-45fe065cf72f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.745339] env[69796]: INFO nova.compute.manager [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Shelve offloading [ 1206.248947] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1206.249359] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b02305af-3971-4641-987a-d99fa7037186 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.257648] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1206.257648] env[69796]: value = "task-4234503" [ 1206.257648] env[69796]: _type = "Task" [ 1206.257648] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.266890] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] VM already powered off {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1206.267137] env[69796]: DEBUG nova.compute.manager [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1206.267887] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c911008-821a-4852-a826-25a73ee49cd9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.274402] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "refresh_cache-2941cb59-151e-4931-be3b-79e2830d793a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.274567] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "refresh_cache-2941cb59-151e-4931-be3b-79e2830d793a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1206.274735] env[69796]: DEBUG nova.network.neutron [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1206.960779] env[69796]: DEBUG nova.network.neutron [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Updating instance_info_cache with network_info: [{"id": "318463f1-256b-4de8-bcbd-55025e35fbb8", "address": "fa:16:3e:85:9e:61", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap318463f1-25", "ovs_interfaceid": "318463f1-256b-4de8-bcbd-55025e35fbb8", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.463773] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "refresh_cache-2941cb59-151e-4931-be3b-79e2830d793a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.690738] env[69796]: DEBUG nova.compute.manager [req-d1580dec-aa42-4b9b-ac2e-b7685c75566e req-2eca26d5-cece-4fd0-a09d-1a6bbb3b0ce0 service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Received event network-vif-unplugged-318463f1-256b-4de8-bcbd-55025e35fbb8 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1207.690951] env[69796]: DEBUG oslo_concurrency.lockutils [req-d1580dec-aa42-4b9b-ac2e-b7685c75566e req-2eca26d5-cece-4fd0-a09d-1a6bbb3b0ce0 service nova] Acquiring lock "2941cb59-151e-4931-be3b-79e2830d793a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.691190] env[69796]: DEBUG oslo_concurrency.lockutils [req-d1580dec-aa42-4b9b-ac2e-b7685c75566e req-2eca26d5-cece-4fd0-a09d-1a6bbb3b0ce0 service nova] Lock "2941cb59-151e-4931-be3b-79e2830d793a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.691336] env[69796]: DEBUG oslo_concurrency.lockutils [req-d1580dec-aa42-4b9b-ac2e-b7685c75566e req-2eca26d5-cece-4fd0-a09d-1a6bbb3b0ce0 service nova] Lock "2941cb59-151e-4931-be3b-79e2830d793a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.691508] env[69796]: DEBUG nova.compute.manager [req-d1580dec-aa42-4b9b-ac2e-b7685c75566e req-2eca26d5-cece-4fd0-a09d-1a6bbb3b0ce0 service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] No waiting events found dispatching network-vif-unplugged-318463f1-256b-4de8-bcbd-55025e35fbb8 {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1207.691676] env[69796]: WARNING nova.compute.manager [req-d1580dec-aa42-4b9b-ac2e-b7685c75566e req-2eca26d5-cece-4fd0-a09d-1a6bbb3b0ce0 service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Received unexpected event network-vif-unplugged-318463f1-256b-4de8-bcbd-55025e35fbb8 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1207.717498] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1207.718426] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6a7edf-6f79-472a-be63-67caab09f867 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.726421] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1207.726662] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-274137a9-cb59-4266-9def-009a53de1a54 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.790633] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1207.790917] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1207.791197] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Deleting the datastore file [datastore2] 2941cb59-151e-4931-be3b-79e2830d793a {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1207.791505] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76ba949f-6230-4733-a08c-4559156b830e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.798767] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1207.798767] env[69796]: value = "task-4234505" [ 1207.798767] env[69796]: _type = "Task" [ 1207.798767] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.807783] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234505, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.308967] env[69796]: DEBUG oslo_vmware.api [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234505, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135708} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.309298] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1208.309500] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1208.309678] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1208.332035] env[69796]: INFO nova.scheduler.client.report [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Deleted allocations for instance 2941cb59-151e-4931-be3b-79e2830d793a [ 1208.837742] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1208.838150] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.838322] env[69796]: DEBUG nova.objects.instance [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lazy-loading 'resources' on Instance uuid 2941cb59-151e-4931-be3b-79e2830d793a {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1209.341660] env[69796]: DEBUG nova.objects.instance [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lazy-loading 'numa_topology' on Instance uuid 2941cb59-151e-4931-be3b-79e2830d793a {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1209.718219] env[69796]: DEBUG nova.compute.manager [req-c86d8e6b-b50f-4091-87c6-2299462a6aed req-d2b58b96-bd7f-42ef-a240-d5f94dc4526b service nova] [instance: 
2941cb59-151e-4931-be3b-79e2830d793a] Received event network-changed-318463f1-256b-4de8-bcbd-55025e35fbb8 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1209.718219] env[69796]: DEBUG nova.compute.manager [req-c86d8e6b-b50f-4091-87c6-2299462a6aed req-d2b58b96-bd7f-42ef-a240-d5f94dc4526b service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Refreshing instance network info cache due to event network-changed-318463f1-256b-4de8-bcbd-55025e35fbb8. {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1209.718219] env[69796]: DEBUG oslo_concurrency.lockutils [req-c86d8e6b-b50f-4091-87c6-2299462a6aed req-d2b58b96-bd7f-42ef-a240-d5f94dc4526b service nova] Acquiring lock "refresh_cache-2941cb59-151e-4931-be3b-79e2830d793a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.718219] env[69796]: DEBUG oslo_concurrency.lockutils [req-c86d8e6b-b50f-4091-87c6-2299462a6aed req-d2b58b96-bd7f-42ef-a240-d5f94dc4526b service nova] Acquired lock "refresh_cache-2941cb59-151e-4931-be3b-79e2830d793a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.718394] env[69796]: DEBUG nova.network.neutron [req-c86d8e6b-b50f-4091-87c6-2299462a6aed req-d2b58b96-bd7f-42ef-a240-d5f94dc4526b service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Refreshing network info cache for port 318463f1-256b-4de8-bcbd-55025e35fbb8 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1209.723498] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4eeaa102-dd80-42fc-85e2-9e1bbf3682cd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "2941cb59-151e-4931-be3b-79e2830d793a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.843753] env[69796]: DEBUG nova.objects.base [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Object Instance<2941cb59-151e-4931-be3b-79e2830d793a> lazy-loaded attributes: resources,numa_topology {{(pid=69796) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1209.983941] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b889873a-c793-486a-9f61-e024511dda6d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.992012] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-881d38c6-d0ad-4353-b4a8-076f75750197 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.024227] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea45f012-954e-4c7f-aafb-ab8a35498eb5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.031961] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97a0e47-a08e-4f68-9ebe-7636e70ccafa {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.045179] env[69796]: DEBUG 
nova.compute.provider_tree [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1210.416842] env[69796]: DEBUG nova.network.neutron [req-c86d8e6b-b50f-4091-87c6-2299462a6aed req-d2b58b96-bd7f-42ef-a240-d5f94dc4526b service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Updated VIF entry in instance network info cache for port 318463f1-256b-4de8-bcbd-55025e35fbb8. {{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1210.417240] env[69796]: DEBUG nova.network.neutron [req-c86d8e6b-b50f-4091-87c6-2299462a6aed req-d2b58b96-bd7f-42ef-a240-d5f94dc4526b service nova] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Updating instance_info_cache with network_info: [{"id": "318463f1-256b-4de8-bcbd-55025e35fbb8", "address": "fa:16:3e:85:9e:61", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": null, "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap318463f1-25", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.548552] env[69796]: DEBUG nova.scheduler.client.report [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1210.920783] env[69796]: DEBUG oslo_concurrency.lockutils [req-c86d8e6b-b50f-4091-87c6-2299462a6aed req-d2b58b96-bd7f-42ef-a240-d5f94dc4526b service nova] Releasing lock "refresh_cache-2941cb59-151e-4931-be3b-79e2830d793a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.054029] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.216s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.353373] env[69796]: DEBUG 
oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.353571] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69796) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1211.562979] env[69796]: DEBUG oslo_concurrency.lockutils [None req-a38b487b-684f-4dd5-844f-4ac16c26c446 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "2941cb59-151e-4931-be3b-79e2830d793a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 19.764s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.563960] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4eeaa102-dd80-42fc-85e2-9e1bbf3682cd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "2941cb59-151e-4931-be3b-79e2830d793a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.840s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.564213] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4eeaa102-dd80-42fc-85e2-9e1bbf3682cd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "2941cb59-151e-4931-be3b-79e2830d793a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.564471] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4eeaa102-dd80-42fc-85e2-9e1bbf3682cd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "2941cb59-151e-4931-be3b-79e2830d793a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.564645] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4eeaa102-dd80-42fc-85e2-9e1bbf3682cd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "2941cb59-151e-4931-be3b-79e2830d793a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.566351] env[69796]: INFO nova.compute.manager [None req-4eeaa102-dd80-42fc-85e2-9e1bbf3682cd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Terminating instance [ 1212.070087] env[69796]: DEBUG nova.compute.manager [None req-4eeaa102-dd80-42fc-85e2-9e1bbf3682cd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Start destroying the instance on the hypervisor. 
{{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1212.070497] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-4eeaa102-dd80-42fc-85e2-9e1bbf3682cd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1212.070715] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-410fb8e6-3f32-41c5-a1c7-b672ca2466e9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.080910] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c5b761-03a7-4d01-9353-07bbb9ed6d40 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.104123] env[69796]: WARNING nova.virt.vmwareapi.vmops [None req-4eeaa102-dd80-42fc-85e2-9e1bbf3682cd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2941cb59-151e-4931-be3b-79e2830d793a could not be found. [ 1212.104338] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-4eeaa102-dd80-42fc-85e2-9e1bbf3682cd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1212.104519] env[69796]: INFO nova.compute.manager [None req-4eeaa102-dd80-42fc-85e2-9e1bbf3682cd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1212.104767] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4eeaa102-dd80-42fc-85e2-9e1bbf3682cd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1212.104986] env[69796]: DEBUG nova.compute.manager [-] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1212.105097] env[69796]: DEBUG nova.network.neutron [-] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1212.354644] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.806713] env[69796]: DEBUG nova.network.neutron [-] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.309273] env[69796]: INFO nova.compute.manager [-] [instance: 2941cb59-151e-4931-be3b-79e2830d793a] Took 1.20 seconds to deallocate network for instance. [ 1213.354076] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1214.336271] env[69796]: DEBUG oslo_concurrency.lockutils [None req-4eeaa102-dd80-42fc-85e2-9e1bbf3682cd tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "2941cb59-151e-4931-be3b-79e2830d793a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.772s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.353033] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager.update_available_resource {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1214.855500] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.855784] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.855960] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.856138] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69796) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1214.857215] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b06040-1305-4d48-b11d-151009b0e043 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.865413] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9a0393a-87f7-4b48-b631-d9ff9f8f2dc9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.880284] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f556d64-3726-4072-afcb-b54e76ac06fa {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.887270] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007b13b1-ccf2-478a-b1cd-b226ee4b6c62 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.918759] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180729MB free_disk=1GB free_vcpus=48 pci_devices=None {{(pid=69796) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1214.918966] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.919345] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.608606] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "9af7f7df-6660-4770-b05a-4d6cc48d161a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.608971] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "9af7f7df-6660-4770-b05a-4d6cc48d161a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.949800] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47f223c0-12b0-4eda-ab42-81fe8b95afac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1215.949962] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 38792225-b054-4c08-b3ec-51d46287b0f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1215.950105] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47005af8-11fe-498f-9b67-e0316faeeb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1215.950239] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 836605ee-50cb-48b0-ba2e-33db3832f8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1215.950352] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance a4a16667-cd00-4850-9389-0bd57c7efd74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1215.950470] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 7f37f6c9-adba-4292-9d47-c455f77e539f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1215.950584] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 8b103adc-9903-406f-8fd1-e193e00cde11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1215.950697] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1215.950810] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 3020e505-513b-4b29-996a-6e70a212f508 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1215.950922] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance f0d4f167-344a-4828-9f6e-8a62ed8e064d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1215.951047] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1215.951163] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1215.951285] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 983f57b3-3bfb-41ce-a924-d48c72d25c9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1216.111727] env[69796]: DEBUG nova.compute.manager [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1216.454153] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 9af7f7df-6660-4770-b05a-4d6cc48d161a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1216.454388] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1216.454545] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1216.627736] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.633596] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776ae4c0-c577-402c-bb1d-06ebfd7fdaa5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.641419] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afa4c8d-2979-4da2-b925-9f7c32047e2c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.671951] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8438d4cd-aedf-49ab-a6b8-e9c982f67116 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.679542] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc9468d-bbe4-4e51-bd26-a9ce65a526d2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.693886] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1217.197585] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1217.702811] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69796) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 
1217.703216] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.784s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.703322] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.076s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.704843] env[69796]: INFO nova.compute.claims [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1218.703318] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1218.703745] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1218.703828] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1218.703987] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1218.878161] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ad32c4-b57e-4950-90ac-868b3a9ebd68 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.886142] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129b0a68-ab16-4cd2-9b49-19caef8d4410 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.916062] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b127a0e8-c941-4a12-9fca-d88e1dc5be0d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.923977] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd45386-026c-4b12-8d38-c154863ea396 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.939483] env[69796]: DEBUG nova.compute.provider_tree [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 
tempest-DeleteServersTestJSON-938478453-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1219.443213] env[69796]: DEBUG nova.scheduler.client.report [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1219.948151] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.245s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.948708] env[69796]: DEBUG nova.compute.manager [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Start building networks asynchronously for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1220.453713] env[69796]: DEBUG nova.compute.utils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1220.455268] env[69796]: DEBUG nova.compute.manager [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Allocating IP information in the background. 
{{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1220.456110] env[69796]: DEBUG nova.network.neutron [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1220.519127] env[69796]: DEBUG nova.policy [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cdcf2d22c98b45ad92e219e24b285b44', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03701784af4041e29a23e885800ea39b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 1220.810821] env[69796]: DEBUG nova.network.neutron [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Successfully created port: a4a7628c-6272-4835-bee4-d5bd79750438 {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1220.959031] env[69796]: DEBUG nova.compute.manager [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Start building block device mappings for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1221.970082] env[69796]: DEBUG nova.compute.manager [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Start spawning the instance on the hypervisor. 
{{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1221.995756] env[69796]: DEBUG nova.virt.hardware [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1221.996097] env[69796]: DEBUG nova.virt.hardware [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1221.996271] env[69796]: DEBUG nova.virt.hardware [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1221.996469] env[69796]: DEBUG nova.virt.hardware [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1221.996614] env[69796]: DEBUG nova.virt.hardware [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1221.996762] env[69796]: DEBUG nova.virt.hardware [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1221.996973] env[69796]: DEBUG nova.virt.hardware [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1221.997179] env[69796]: DEBUG nova.virt.hardware [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1221.997379] env[69796]: DEBUG nova.virt.hardware [None 
req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1221.997552] env[69796]: DEBUG nova.virt.hardware [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1221.997729] env[69796]: DEBUG nova.virt.hardware [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1221.998603] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5952c2ba-eb22-4152-b5bb-921bf7b83922 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.006808] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a6e81e-a52f-4f33-854e-4a5ffdbffdca {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.168447] env[69796]: DEBUG nova.compute.manager [req-37b6601b-94aa-4228-8247-dbfbe74ca94a req-3eede812-bd5f-4618-80b3-cf895b2a4394 service nova] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Received event network-vif-plugged-a4a7628c-6272-4835-bee4-d5bd79750438 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1222.168674] env[69796]: DEBUG oslo_concurrency.lockutils [req-37b6601b-94aa-4228-8247-dbfbe74ca94a req-3eede812-bd5f-4618-80b3-cf895b2a4394 service nova] Acquiring lock "9af7f7df-6660-4770-b05a-4d6cc48d161a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1222.168886] env[69796]: DEBUG oslo_concurrency.lockutils [req-37b6601b-94aa-4228-8247-dbfbe74ca94a req-3eede812-bd5f-4618-80b3-cf895b2a4394 service nova] Lock "9af7f7df-6660-4770-b05a-4d6cc48d161a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.169069] env[69796]: DEBUG oslo_concurrency.lockutils [req-37b6601b-94aa-4228-8247-dbfbe74ca94a req-3eede812-bd5f-4618-80b3-cf895b2a4394 service nova] Lock "9af7f7df-6660-4770-b05a-4d6cc48d161a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.169261] env[69796]: DEBUG nova.compute.manager [req-37b6601b-94aa-4228-8247-dbfbe74ca94a req-3eede812-bd5f-4618-80b3-cf895b2a4394 service nova] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] No waiting events found dispatching network-vif-plugged-a4a7628c-6272-4835-bee4-d5bd79750438 {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1222.169438] env[69796]: WARNING nova.compute.manager [req-37b6601b-94aa-4228-8247-dbfbe74ca94a 
req-3eede812-bd5f-4618-80b3-cf895b2a4394 service nova] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Received unexpected event network-vif-plugged-a4a7628c-6272-4835-bee4-d5bd79750438 for instance with vm_state building and task_state spawning. [ 1222.254031] env[69796]: DEBUG nova.network.neutron [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Successfully updated port: a4a7628c-6272-4835-bee4-d5bd79750438 {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1222.757268] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "refresh_cache-9af7f7df-6660-4770-b05a-4d6cc48d161a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.757496] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "refresh_cache-9af7f7df-6660-4770-b05a-4d6cc48d161a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1222.757673] env[69796]: DEBUG nova.network.neutron [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1223.288568] env[69796]: DEBUG nova.network.neutron [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1223.405774] env[69796]: DEBUG nova.network.neutron [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Updating instance_info_cache with network_info: [{"id": "a4a7628c-6272-4835-bee4-d5bd79750438", "address": "fa:16:3e:ce:1f:d0", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4a7628c-62", "ovs_interfaceid": "a4a7628c-6272-4835-bee4-d5bd79750438", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.908393] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "refresh_cache-9af7f7df-6660-4770-b05a-4d6cc48d161a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1223.908729] env[69796]: DEBUG nova.compute.manager [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Instance network_info: |[{"id": "a4a7628c-6272-4835-bee4-d5bd79750438", "address": "fa:16:3e:ce:1f:d0", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4a7628c-62", "ovs_interfaceid": "a4a7628c-6272-4835-bee4-d5bd79750438", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1223.909225] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:1f:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20e3f794-c7a3-4696-9488-ecf34c570ef9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4a7628c-6272-4835-bee4-d5bd79750438', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1223.916771] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1223.916987] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1223.917258] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-edf81fa5-7d03-4240-8347-01014b080347 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.938648] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1223.938648] env[69796]: value = "task-4234506" [ 1223.938648] env[69796]: _type = "Task" [ 1223.938648] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.946729] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234506, 'name': CreateVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.196978] env[69796]: DEBUG nova.compute.manager [req-547d7227-f3ed-4b11-a881-b0abb0e4b1e6 req-ef56a2c8-ab50-4957-9e6f-9948dbae0663 service nova] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Received event network-changed-a4a7628c-6272-4835-bee4-d5bd79750438 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1224.197222] env[69796]: DEBUG nova.compute.manager [req-547d7227-f3ed-4b11-a881-b0abb0e4b1e6 req-ef56a2c8-ab50-4957-9e6f-9948dbae0663 service nova] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Refreshing instance network info cache due to event network-changed-a4a7628c-6272-4835-bee4-d5bd79750438. 
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1224.197459] env[69796]: DEBUG oslo_concurrency.lockutils [req-547d7227-f3ed-4b11-a881-b0abb0e4b1e6 req-ef56a2c8-ab50-4957-9e6f-9948dbae0663 service nova] Acquiring lock "refresh_cache-9af7f7df-6660-4770-b05a-4d6cc48d161a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.197610] env[69796]: DEBUG oslo_concurrency.lockutils [req-547d7227-f3ed-4b11-a881-b0abb0e4b1e6 req-ef56a2c8-ab50-4957-9e6f-9948dbae0663 service nova] Acquired lock "refresh_cache-9af7f7df-6660-4770-b05a-4d6cc48d161a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.197775] env[69796]: DEBUG nova.network.neutron [req-547d7227-f3ed-4b11-a881-b0abb0e4b1e6 req-ef56a2c8-ab50-4957-9e6f-9948dbae0663 service nova] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Refreshing network info cache for port a4a7628c-6272-4835-bee4-d5bd79750438 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1224.448833] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234506, 'name': CreateVM_Task} progress is 99%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.884250] env[69796]: DEBUG nova.network.neutron [req-547d7227-f3ed-4b11-a881-b0abb0e4b1e6 req-ef56a2c8-ab50-4957-9e6f-9948dbae0663 service nova] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Updated VIF entry in instance network info cache for port a4a7628c-6272-4835-bee4-d5bd79750438. {{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1224.884622] env[69796]: DEBUG nova.network.neutron [req-547d7227-f3ed-4b11-a881-b0abb0e4b1e6 req-ef56a2c8-ab50-4957-9e6f-9948dbae0663 service nova] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Updating instance_info_cache with network_info: [{"id": "a4a7628c-6272-4835-bee4-d5bd79750438", "address": "fa:16:3e:ce:1f:d0", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4a7628c-62", "ovs_interfaceid": "a4a7628c-6272-4835-bee4-d5bd79750438", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.949046] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234506, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.387690] env[69796]: DEBUG oslo_concurrency.lockutils [req-547d7227-f3ed-4b11-a881-b0abb0e4b1e6 req-ef56a2c8-ab50-4957-9e6f-9948dbae0663 service nova] Releasing lock "refresh_cache-9af7f7df-6660-4770-b05a-4d6cc48d161a" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1225.449781] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234506, 'name': CreateVM_Task, 'duration_secs': 1.368189} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.450189] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1225.450673] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.450848] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1225.451214] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1225.451503] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bca2d28-73dd-41b8-848c-0e334bfd9390 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.456753] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1225.456753] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52e84f9e-50f3-c02b-c126-51392985c1c0" [ 1225.456753] env[69796]: _type = "Task" [ 1225.456753] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.465531] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52e84f9e-50f3-c02b-c126-51392985c1c0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.967806] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52e84f9e-50f3-c02b-c126-51392985c1c0, 'name': SearchDatastore_Task, 'duration_secs': 0.010272} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.968121] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1225.968361] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1225.968599] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.968750] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1225.968933] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1225.969222] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5238d6a8-09cd-484f-b7f9-cf5598c9b0e4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.978380] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1225.978566] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1225.979350] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-766ed1a6-4b02-4645-944e-2f2f837382fe {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.984797] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1225.984797] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52e0b35a-afd5-c2d4-2b78-37a9ee73df32" [ 1225.984797] env[69796]: _type = "Task" [ 1225.984797] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.993401] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52e0b35a-afd5-c2d4-2b78-37a9ee73df32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1226.495850] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52e0b35a-afd5-c2d4-2b78-37a9ee73df32, 'name': SearchDatastore_Task, 'duration_secs': 0.009467} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1226.496673] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-480e6c39-aa0c-424a-97e0-f4477506be94 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.502080] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1226.502080] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52df6a16-1bfd-a36c-21a1-3803e2a53bd4" [ 1226.502080] env[69796]: _type = "Task" [ 1226.502080] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1226.509915] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52df6a16-1bfd-a36c-21a1-3803e2a53bd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.014281] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52df6a16-1bfd-a36c-21a1-3803e2a53bd4, 'name': SearchDatastore_Task, 'duration_secs': 0.009505} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.014566] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1227.014823] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 9af7f7df-6660-4770-b05a-4d6cc48d161a/9af7f7df-6660-4770-b05a-4d6cc48d161a.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1227.015102] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e6dec393-0475-4e20-ab04-6003d4f71f79 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.021665] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1227.021665] env[69796]: value = "task-4234507" [ 1227.021665] env[69796]: _type = "Task" [ 1227.021665] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.029499] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234507, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.531948] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234507, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.033291] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234507, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523775} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.033626] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] 9af7f7df-6660-4770-b05a-4d6cc48d161a/9af7f7df-6660-4770-b05a-4d6cc48d161a.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1228.033855] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1228.034128] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6ce29ae-1d20-4a61-9357-294b3e49e31c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.043374] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1228.043374] env[69796]: value = "task-4234508" [ 1228.043374] env[69796]: _type = "Task" [ 1228.043374] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.053053] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234508, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.553017] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234508, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070989} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.553396] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1228.554111] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028392d5-b573-48d4-8d41-3b73ceaf33e3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.576216] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 9af7f7df-6660-4770-b05a-4d6cc48d161a/9af7f7df-6660-4770-b05a-4d6cc48d161a.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1228.576480] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-354aa96e-4676-49ad-a162-53fb5121e351 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.595395] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1228.595395] env[69796]: value = "task-4234509" [ 1228.595395] env[69796]: _type = "Task" [ 1228.595395] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.603566] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234509, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.105955] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234509, 'name': ReconfigVM_Task, 'duration_secs': 0.332911} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.106400] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 9af7f7df-6660-4770-b05a-4d6cc48d161a/9af7f7df-6660-4770-b05a-4d6cc48d161a.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1229.107057] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-701ae058-9818-46d3-ab76-68871bd8b75f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.115867] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1229.115867] env[69796]: value = "task-4234510" [ 1229.115867] env[69796]: _type = "Task" [ 1229.115867] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.126574] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234510, 'name': Rename_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.626405] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234510, 'name': Rename_Task, 'duration_secs': 0.266158} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.626809] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1229.626930] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68f25b19-9e57-4eb7-9512-eaa720eeb8c6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.633913] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1229.633913] env[69796]: value = "task-4234511" [ 1229.633913] env[69796]: _type = "Task" [ 1229.633913] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.642167] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234511, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.143994] env[69796]: DEBUG oslo_vmware.api [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234511, 'name': PowerOnVM_Task, 'duration_secs': 0.495996} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.144282] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1230.144496] env[69796]: INFO nova.compute.manager [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Took 8.17 seconds to spawn the instance on the hypervisor. [ 1230.144675] env[69796]: DEBUG nova.compute.manager [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1230.145524] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea0174a-1a77-4df6-9be5-419d159014f2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.662119] env[69796]: INFO nova.compute.manager [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Took 14.05 seconds to build instance. 
[ 1230.858647] env[69796]: DEBUG oslo_concurrency.lockutils [None req-693c5719-d2c5-4db5-90da-3ed9d2964b1d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "9af7f7df-6660-4770-b05a-4d6cc48d161a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.165024] env[69796]: DEBUG oslo_concurrency.lockutils [None req-12ddd636-1049-4c98-b498-22b7887380d7 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "9af7f7df-6660-4770-b05a-4d6cc48d161a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.556s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.165490] env[69796]: DEBUG oslo_concurrency.lockutils [None req-693c5719-d2c5-4db5-90da-3ed9d2964b1d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "9af7f7df-6660-4770-b05a-4d6cc48d161a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.307s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.165763] env[69796]: DEBUG nova.compute.manager [None req-693c5719-d2c5-4db5-90da-3ed9d2964b1d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1231.166794] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8e79ae-1d92-42ed-b5eb-b58f9b111e95 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.174187] env[69796]: DEBUG nova.compute.manager [None req-693c5719-d2c5-4db5-90da-3ed9d2964b1d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69796) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1231.174756] env[69796]: DEBUG nova.objects.instance [None req-693c5719-d2c5-4db5-90da-3ed9d2964b1d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lazy-loading 'flavor' on Instance uuid 9af7f7df-6660-4770-b05a-4d6cc48d161a {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1232.182516] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-693c5719-d2c5-4db5-90da-3ed9d2964b1d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Powering off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1232.182910] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-452a6fd1-2cf3-417f-bc57-9775762a55ae {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.190824] env[69796]: DEBUG oslo_vmware.api [None req-693c5719-d2c5-4db5-90da-3ed9d2964b1d tempest-DeleteServersTestJSON-938478453 
tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1232.190824] env[69796]: value = "task-4234512" [ 1232.190824] env[69796]: _type = "Task" [ 1232.190824] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.199589] env[69796]: DEBUG oslo_vmware.api [None req-693c5719-d2c5-4db5-90da-3ed9d2964b1d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234512, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.701330] env[69796]: DEBUG oslo_vmware.api [None req-693c5719-d2c5-4db5-90da-3ed9d2964b1d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234512, 'name': PowerOffVM_Task, 'duration_secs': 0.220431} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.701606] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-693c5719-d2c5-4db5-90da-3ed9d2964b1d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Powered off the VM {{(pid=69796) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1232.701850] env[69796]: DEBUG nova.compute.manager [None req-693c5719-d2c5-4db5-90da-3ed9d2964b1d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1232.702647] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e31462-d209-4958-8244-31a086c43ae5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.213742] env[69796]: DEBUG oslo_concurrency.lockutils [None req-693c5719-d2c5-4db5-90da-3ed9d2964b1d tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "9af7f7df-6660-4770-b05a-4d6cc48d161a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.304755] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "9af7f7df-6660-4770-b05a-4d6cc48d161a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.305174] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "9af7f7df-6660-4770-b05a-4d6cc48d161a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.305316] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08b1713d-265a-432c-b029-8556abf20ca2 
tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "9af7f7df-6660-4770-b05a-4d6cc48d161a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.305516] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "9af7f7df-6660-4770-b05a-4d6cc48d161a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.305692] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "9af7f7df-6660-4770-b05a-4d6cc48d161a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.307852] env[69796]: INFO nova.compute.manager [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Terminating instance [ 1234.811654] env[69796]: DEBUG nova.compute.manager [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Start destroying the instance on the hypervisor. 
{{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1234.811955] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1234.812882] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2acc264-0106-4f7b-ab78-82a189e4229f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.820755] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1234.821013] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a6c24cf-ee05-49d4-a541-292d735ece71 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.882935] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1234.882935] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1234.882935] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Deleting the datastore file [datastore2] 9af7f7df-6660-4770-b05a-4d6cc48d161a {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1234.883269] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d588da67-8dff-4b1a-a315-f562f700143a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.889534] env[69796]: DEBUG oslo_vmware.api [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1234.889534] env[69796]: value = "task-4234514" [ 1234.889534] env[69796]: _type = "Task" [ 1234.889534] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.898662] env[69796]: DEBUG oslo_vmware.api [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234514, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.399701] env[69796]: DEBUG oslo_vmware.api [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234514, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143965} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.400088] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1235.400165] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1235.400346] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1235.400529] env[69796]: INFO nova.compute.manager [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1235.400768] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1235.400960] env[69796]: DEBUG nova.compute.manager [-] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1235.401066] env[69796]: DEBUG nova.network.neutron [-] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1235.664978] env[69796]: DEBUG nova.compute.manager [req-2ba026f8-4c0b-4161-9c78-5e553df3ecda req-4745d9ac-cff9-4f30-a367-fbf467e0f8be service nova] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Received event network-vif-deleted-a4a7628c-6272-4835-bee4-d5bd79750438 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1235.665230] env[69796]: INFO nova.compute.manager [req-2ba026f8-4c0b-4161-9c78-5e553df3ecda req-4745d9ac-cff9-4f30-a367-fbf467e0f8be service nova] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Neutron deleted interface a4a7628c-6272-4835-bee4-d5bd79750438; detaching it from the instance and deleting it from the info cache [ 1235.665823] env[69796]: DEBUG nova.network.neutron [req-2ba026f8-4c0b-4161-9c78-5e553df3ecda req-4745d9ac-cff9-4f30-a367-fbf467e0f8be service nova] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.151036] env[69796]: DEBUG nova.network.neutron [-] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.169600] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a233bbe6-b028-4178-b743-987e57d081ee {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.179555] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81002dda-0e64-4d76-ab64-5b7c999d1051 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.203414] env[69796]: DEBUG nova.compute.manager [req-2ba026f8-4c0b-4161-9c78-5e553df3ecda req-4745d9ac-cff9-4f30-a367-fbf467e0f8be service nova] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Detach interface failed, port_id=a4a7628c-6272-4835-bee4-d5bd79750438, reason: Instance 9af7f7df-6660-4770-b05a-4d6cc48d161a could not be found. {{(pid=69796) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1236.654447] env[69796]: INFO nova.compute.manager [-] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Took 1.25 seconds to deallocate network for instance. 
[ 1237.161169] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.161499] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.161740] env[69796]: DEBUG nova.objects.instance [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lazy-loading 'resources' on Instance uuid 9af7f7df-6660-4770-b05a-4d6cc48d161a {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1237.830691] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893dbf74-d63e-4781-9f2a-bfa214db0b9d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.840334] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f5d5e8-af5e-4d1e-81b2-549acb46de93 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.880174] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b052b96b-f16d-4a31-bb63-6032751c5492 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.888768] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a405013-a447-4e53-901a-200314108671 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.902747] env[69796]: DEBUG nova.compute.provider_tree [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1238.407129] env[69796]: DEBUG nova.scheduler.client.report [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1238.911615] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 
tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.750s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.944783] env[69796]: INFO nova.scheduler.client.report [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Deleted allocations for instance 9af7f7df-6660-4770-b05a-4d6cc48d161a [ 1239.452056] env[69796]: DEBUG oslo_concurrency.lockutils [None req-08b1713d-265a-432c-b029-8556abf20ca2 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "9af7f7df-6660-4770-b05a-4d6cc48d161a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.147s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.146417] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "a6da1715-36d0-4837-90cd-f2d62afa1ced" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.147021] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "a6da1715-36d0-4837-90cd-f2d62afa1ced" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.649790] env[69796]: DEBUG nova.compute.manager [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1241.169538] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.169990] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.171703] env[69796]: INFO nova.compute.claims [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1242.328928] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473b616b-ac5b-4b1d-8e38-bd07b1711abb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.336846] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb8c98e-e2b4-4909-99ed-d6b5ea25d5ed {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.366124] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c26d352-f09e-4cd4-a623-2f73fda64c2a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.373380] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72746066-8480-4247-a708-5383a835c355 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.386279] env[69796]: DEBUG nova.compute.provider_tree [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Inventory has not changed in ProviderTree for provider: dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1242.890061] env[69796]: DEBUG nova.scheduler.client.report [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Inventory has not changed for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1243.395235] env[69796]: DEBUG oslo_concurrency.lockutils [None 
req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.225s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.395868] env[69796]: DEBUG nova.compute.manager [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Start building networks asynchronously for instance. {{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1243.900920] env[69796]: DEBUG nova.compute.utils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Using /dev/sd instead of None {{(pid=69796) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1243.902458] env[69796]: DEBUG nova.compute.manager [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Allocating IP information in the background. {{(pid=69796) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1243.902667] env[69796]: DEBUG nova.network.neutron [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] allocate_for_instance() {{(pid=69796) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1243.949731] env[69796]: DEBUG nova.policy [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cdcf2d22c98b45ad92e219e24b285b44', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '03701784af4041e29a23e885800ea39b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69796) authorize /opt/stack/nova/nova/policy.py:192}} [ 1244.221755] env[69796]: DEBUG nova.network.neutron [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Successfully created port: 1b0effd2-7bf0-4c21-b385-f4fd20590150 {{(pid=69796) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1244.407720] env[69796]: DEBUG nova.compute.manager [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Start building block device mappings for instance. 
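Annotation: the inventory payload reported to Placement above fixes the schedulable capacity of this node. A small sketch, assuming Placement's usual capacity formula (total - reserved) * allocation_ratio, with the figures copied from the report:

```python
# Hedged sketch (not Nova/Placement code): derive effective capacity from the
# inventory payload logged by nova.scheduler.client.report above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    """Schedulable amount per resource class: (total - reserved) * allocation_ratio."""
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```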
{{(pid=69796) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1245.415839] env[69796]: DEBUG nova.compute.manager [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Start spawning the instance on the hypervisor. {{(pid=69796) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1245.441436] env[69796]: DEBUG nova.virt.hardware [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-19T13:17:38Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-19T13:17:18Z,direct_url=,disk_format='vmdk',id=11e211db-44f8-4e34-8fec-8b87ab3fce6f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='21c30a7ac31746b2847e98c1cec76d99',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-19T13:17:19Z,virtual_size=,visibility=), allow threads: False {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1245.441714] env[69796]: DEBUG nova.virt.hardware [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Flavor limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1245.441875] env[69796]: DEBUG nova.virt.hardware [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Image limits 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1245.442070] env[69796]: DEBUG nova.virt.hardware [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Flavor pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1245.442220] env[69796]: DEBUG nova.virt.hardware [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Image pref 0:0:0 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1245.442369] env[69796]: DEBUG nova.virt.hardware [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69796) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1245.442592] env[69796]: DEBUG nova.virt.hardware [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1245.442775] env[69796]: 
DEBUG nova.virt.hardware [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1245.442949] env[69796]: DEBUG nova.virt.hardware [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Got 1 possible topologies {{(pid=69796) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1245.443127] env[69796]: DEBUG nova.virt.hardware [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1245.443302] env[69796]: DEBUG nova.virt.hardware [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69796) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1245.444390] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff6838b-8c87-4a8c-8396-6cbc941b3118 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.453080] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b2d2f8-cdfb-46e1-b078-1e43a31c42a4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.575528] env[69796]: DEBUG nova.compute.manager [req-dc4191d9-c680-40b7-a981-b2674b72871e req-97006e18-7494-4211-a42d-6839d6fcb3de service nova] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Received event network-vif-plugged-1b0effd2-7bf0-4c21-b385-f4fd20590150 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1245.575755] env[69796]: DEBUG oslo_concurrency.lockutils [req-dc4191d9-c680-40b7-a981-b2674b72871e req-97006e18-7494-4211-a42d-6839d6fcb3de service nova] Acquiring lock "a6da1715-36d0-4837-90cd-f2d62afa1ced-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.575984] env[69796]: DEBUG oslo_concurrency.lockutils [req-dc4191d9-c680-40b7-a981-b2674b72871e req-97006e18-7494-4211-a42d-6839d6fcb3de service nova] Lock "a6da1715-36d0-4837-90cd-f2d62afa1ced-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.576150] env[69796]: DEBUG oslo_concurrency.lockutils [req-dc4191d9-c680-40b7-a981-b2674b72871e req-97006e18-7494-4211-a42d-6839d6fcb3de service nova] Lock "a6da1715-36d0-4837-90cd-f2d62afa1ced-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.576319] env[69796]: DEBUG nova.compute.manager [req-dc4191d9-c680-40b7-a981-b2674b72871e 
req-97006e18-7494-4211-a42d-6839d6fcb3de service nova] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] No waiting events found dispatching network-vif-plugged-1b0effd2-7bf0-4c21-b385-f4fd20590150 {{(pid=69796) pop_instance_event /opt/stack/nova/nova/compute/manager.py:321}} [ 1245.576477] env[69796]: WARNING nova.compute.manager [req-dc4191d9-c680-40b7-a981-b2674b72871e req-97006e18-7494-4211-a42d-6839d6fcb3de service nova] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Received unexpected event network-vif-plugged-1b0effd2-7bf0-4c21-b385-f4fd20590150 for instance with vm_state building and task_state spawning. [ 1245.657792] env[69796]: DEBUG nova.network.neutron [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Successfully updated port: 1b0effd2-7bf0-4c21-b385-f4fd20590150 {{(pid=69796) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1246.160691] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "refresh_cache-a6da1715-36d0-4837-90cd-f2d62afa1ced" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.160912] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "refresh_cache-a6da1715-36d0-4837-90cd-f2d62afa1ced" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1246.161177] env[69796]: DEBUG nova.network.neutron [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1246.693316] env[69796]: DEBUG nova.network.neutron [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Instance cache missing network info. 
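Annotation: the nova.virt.hardware lines a little above pick a CPU topology for the 1-vCPU m1.nano flavor with no flavor or image limits, ending up with the single candidate 1:1:1. A simplified, hedged reimplementation of that search (the real _get_possible_cpu_topologies applies more constraints):

```python
import itertools
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Simplified sketch: every (sockets, cores, threads) whose product equals vcpus.
    for s, c, t in itertools.product(range(1, min(vcpus, max_sockets) + 1),
                                     range(1, min(vcpus, max_cores) + 1),
                                     range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            yield VirtCPUTopology(s, c, t)

print(list(possible_topologies(1)))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]  -- the single topology logged above
```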
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1246.814114] env[69796]: DEBUG nova.network.neutron [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Updating instance_info_cache with network_info: [{"id": "1b0effd2-7bf0-4c21-b385-f4fd20590150", "address": "fa:16:3e:6d:94:0a", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b0effd2-7b", "ovs_interfaceid": "1b0effd2-7bf0-4c21-b385-f4fd20590150", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.317503] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "refresh_cache-a6da1715-36d0-4837-90cd-f2d62afa1ced" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1247.317886] env[69796]: DEBUG nova.compute.manager [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Instance network_info: |[{"id": "1b0effd2-7bf0-4c21-b385-f4fd20590150", "address": "fa:16:3e:6d:94:0a", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b0effd2-7b", "ovs_interfaceid": "1b0effd2-7bf0-4c21-b385-f4fd20590150", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69796) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1247.318405] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:94:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '20e3f794-c7a3-4696-9488-ecf34c570ef9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b0effd2-7bf0-4c21-b385-f4fd20590150', 'vif_model': 'vmxnet3'}] {{(pid=69796) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1247.325936] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1247.326176] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Creating VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1247.326404] env[69796]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23464337-a3c5-48c8-a8e3-429783faa801 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.347298] env[69796]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1247.347298] env[69796]: value = "task-4234515" [ 1247.347298] env[69796]: _type = "Task" [ 1247.347298] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.355359] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234515, 'name': CreateVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.604727] env[69796]: DEBUG nova.compute.manager [req-a4817e8d-5775-41f0-b5ba-96f359781d28 req-b44d6bac-a924-4b59-a3ba-4b7ad564f31f service nova] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Received event network-changed-1b0effd2-7bf0-4c21-b385-f4fd20590150 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1247.604934] env[69796]: DEBUG nova.compute.manager [req-a4817e8d-5775-41f0-b5ba-96f359781d28 req-b44d6bac-a924-4b59-a3ba-4b7ad564f31f service nova] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Refreshing instance network info cache due to event network-changed-1b0effd2-7bf0-4c21-b385-f4fd20590150. 
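Annotation: the instance_info_cache entry above is a list of VIF dicts. A short sketch that pulls the fixed IP, MAC address and MTU out of an entry shaped like the one cached for port 1b0effd2 (trimmed to the fields used here):

```python
import json

# Trimmed copy of the network_info entry logged above (illustrative only).
network_info = json.loads('''
[{"id": "1b0effd2-7bf0-4c21-b385-f4fd20590150",
  "address": "fa:16:3e:6d:94:0a",
  "network": {"subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.8", "type": "fixed"}]}],
              "meta": {"mtu": 8950}},
  "type": "ovs", "devname": "tap1b0effd2-7b"}]
''')

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed_ips, vif["network"]["meta"]["mtu"])
# 1b0effd2-7bf0-4c21-b385-f4fd20590150 fa:16:3e:6d:94:0a ['192.168.128.8'] 8950
```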
{{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11773}} [ 1247.605186] env[69796]: DEBUG oslo_concurrency.lockutils [req-a4817e8d-5775-41f0-b5ba-96f359781d28 req-b44d6bac-a924-4b59-a3ba-4b7ad564f31f service nova] Acquiring lock "refresh_cache-a6da1715-36d0-4837-90cd-f2d62afa1ced" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.605296] env[69796]: DEBUG oslo_concurrency.lockutils [req-a4817e8d-5775-41f0-b5ba-96f359781d28 req-b44d6bac-a924-4b59-a3ba-4b7ad564f31f service nova] Acquired lock "refresh_cache-a6da1715-36d0-4837-90cd-f2d62afa1ced" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.605458] env[69796]: DEBUG nova.network.neutron [req-a4817e8d-5775-41f0-b5ba-96f359781d28 req-b44d6bac-a924-4b59-a3ba-4b7ad564f31f service nova] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Refreshing network info cache for port 1b0effd2-7bf0-4c21-b385-f4fd20590150 {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1247.860046] env[69796]: DEBUG oslo_vmware.api [-] Task: {'id': task-4234515, 'name': CreateVM_Task, 'duration_secs': 0.341988} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.860046] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Created VM on the ESX host {{(pid=69796) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1247.860471] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.860471] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.860719] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1247.860980] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9969388a-1323-4a19-994a-33c0a0192cb5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.865707] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1247.865707] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52aa299f-3068-d674-f2cf-922b101cefe1" [ 1247.865707] env[69796]: _type = "Task" [ 1247.865707] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.874698] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52aa299f-3068-d674-f2cf-922b101cefe1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.296065] env[69796]: DEBUG nova.network.neutron [req-a4817e8d-5775-41f0-b5ba-96f359781d28 req-b44d6bac-a924-4b59-a3ba-4b7ad564f31f service nova] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Updated VIF entry in instance network info cache for port 1b0effd2-7bf0-4c21-b385-f4fd20590150. {{(pid=69796) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1248.296524] env[69796]: DEBUG nova.network.neutron [req-a4817e8d-5775-41f0-b5ba-96f359781d28 req-b44d6bac-a924-4b59-a3ba-4b7ad564f31f service nova] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Updating instance_info_cache with network_info: [{"id": "1b0effd2-7bf0-4c21-b385-f4fd20590150", "address": "fa:16:3e:6d:94:0a", "network": {"id": "5a0c0aad-861c-4670-bdaf-a8803f45f87b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-228955846-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "03701784af4041e29a23e885800ea39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "20e3f794-c7a3-4696-9488-ecf34c570ef9", "external-id": "nsx-vlan-transportzone-509", "segmentation_id": 509, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b0effd2-7b", "ovs_interfaceid": "1b0effd2-7bf0-4c21-b385-f4fd20590150", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.376383] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52aa299f-3068-d674-f2cf-922b101cefe1, 'name': SearchDatastore_Task, 'duration_secs': 0.010771} completed successfully. 
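Annotation: the CreateVM_Task and SearchDatastore_Task lines are oslo.vmware's wait_for_task polling vCenter until the task reaches a terminal state (progress 0% … completed successfully, with duration_secs reported at the end). A stdlib-only sketch of that polling pattern; poll_task, the interval and the state names here are assumptions, not the library's actual internals:

```python
import time

def wait_for_task(poll_task, interval=0.5, timeout=300):
    """Poll until the task reports success or error (illustrative sketch)."""
    start = time.monotonic()
    while time.monotonic() - start < timeout:
        state, progress = poll_task()          # e.g. ("running", 66)
        if state == "success":
            return time.monotonic() - start    # roughly the duration_secs idea
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)
    raise TimeoutError("task did not complete within %ss" % timeout)

# Toy usage: a task that finishes on the third poll.
states = iter([("running", 0), ("running", 66), ("success", 100)])
print(round(wait_for_task(lambda: next(states), interval=0.01), 2))
```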
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.376700] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1248.376913] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Processing image 11e211db-44f8-4e34-8fec-8b87ab3fce6f {{(pid=69796) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1248.377172] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.377323] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.377502] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1248.377767] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ffb8f7ac-5b8c-4620-980c-9c60e283c992 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.386689] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69796) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1248.386870] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Folder [datastore2] devstack-image-cache_base created. 
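Annotation: the locks, SearchDatastore_Task and MakeDirectory calls above all revolve around one path convention: the base image is cached as [datastore] devstack-image-cache_base/<image-id>/<image-id>.vmdk and, as the CopyVirtualDisk_Task below shows, copied to [datastore] <instance-uuid>/<instance-uuid>.vmdk. A tiny helper reproducing that layout (the function names are made up):

```python
def cached_image_path(datastore, image_id, cache_folder="devstack-image-cache_base"):
    # "[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk"
    return "[%s] %s/%s/%s.vmdk" % (datastore, cache_folder, image_id, image_id)

def instance_disk_path(datastore, instance_uuid):
    # "[datastore2] <instance-uuid>/<instance-uuid>.vmdk"
    return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

print(cached_image_path("datastore2", "11e211db-44f8-4e34-8fec-8b87ab3fce6f"))
print(instance_disk_path("datastore2", "a6da1715-36d0-4837-90cd-f2d62afa1ced"))
```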
{{(pid=69796) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1248.387577] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baf7e037-cc1a-45e8-8e45-bc81e8804d5f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.394732] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1248.394732] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52cc3b41-524f-5e72-56fe-71d7efc77c88" [ 1248.394732] env[69796]: _type = "Task" [ 1248.394732] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.402946] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52cc3b41-524f-5e72-56fe-71d7efc77c88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.799441] env[69796]: DEBUG oslo_concurrency.lockutils [req-a4817e8d-5775-41f0-b5ba-96f359781d28 req-b44d6bac-a924-4b59-a3ba-4b7ad564f31f service nova] Releasing lock "refresh_cache-a6da1715-36d0-4837-90cd-f2d62afa1ced" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1248.905667] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52cc3b41-524f-5e72-56fe-71d7efc77c88, 'name': SearchDatastore_Task, 'duration_secs': 0.008811} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.906593] env[69796]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90dbbe66-3d9f-4ea1-a00b-44416958f614 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.912445] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1248.912445] env[69796]: value = "session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52ffa561-2389-0e2c-efe1-68f8327e5bca" [ 1248.912445] env[69796]: _type = "Task" [ 1248.912445] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.920826] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52ffa561-2389-0e2c-efe1-68f8327e5bca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.422757] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': session[52d5d017-4d2b-18a5-c895-13c1c915c00e]52ffa561-2389-0e2c-efe1-68f8327e5bca, 'name': SearchDatastore_Task, 'duration_secs': 0.031486} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.423046] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "[datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.423331] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] a6da1715-36d0-4837-90cd-f2d62afa1ced/a6da1715-36d0-4837-90cd-f2d62afa1ced.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1249.423604] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5948d36b-b42d-45b2-9537-291cb95759be {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.430883] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1249.430883] env[69796]: value = "task-4234516" [ 1249.430883] env[69796]: _type = "Task" [ 1249.430883] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.439150] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234516, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.940812] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234516, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457261} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.941574] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/11e211db-44f8-4e34-8fec-8b87ab3fce6f/11e211db-44f8-4e34-8fec-8b87ab3fce6f.vmdk to [datastore2] a6da1715-36d0-4837-90cd-f2d62afa1ced/a6da1715-36d0-4837-90cd-f2d62afa1ced.vmdk {{(pid=69796) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1249.941987] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Extending root virtual disk to 1048576 {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1249.942373] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad52a933-d552-44ea-a7da-6f3746636358 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.948813] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1249.948813] env[69796]: value = "task-4234517" [ 1249.948813] env[69796]: _type = "Task" [ 1249.948813] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.957362] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234517, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.459385] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234517, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.054775} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.461389] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Extended root virtual disk {{(pid=69796) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1250.461389] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27cb352e-76cb-4e95-8792-4c37b8d8fdb9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.482660] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] a6da1715-36d0-4837-90cd-f2d62afa1ced/a6da1715-36d0-4837-90cd-f2d62afa1ced.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1250.482940] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ceedd06-e4e6-4295-bb25-4f646cee53b5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.503829] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1250.503829] env[69796]: value = "task-4234518" [ 1250.503829] env[69796]: _type = "Task" [ 1250.503829] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.512211] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234518, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.014612] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234518, 'name': ReconfigVM_Task, 'duration_secs': 0.326841} completed successfully. 
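Annotation: "Extending root virtual disk to 1048576" above is the flavor's root_gb=1 expressed in KiB (1 GiB = 1024 * 1024 KiB); the unit is inferred from the numbers, so treat this as a hedged reading:

```python
root_gb = 1                               # m1.nano flavor from the log
requested_size_kib = root_gb * 1024 * 1024
assert requested_size_kib == 1048576      # matches the value logged above
```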
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.015044] env[69796]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Reconfigured VM instance instance-00000062 to attach disk [datastore2] a6da1715-36d0-4837-90cd-f2d62afa1ced/a6da1715-36d0-4837-90cd-f2d62afa1ced.vmdk or device None with type sparse {{(pid=69796) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1251.015491] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fec9f212-9ca5-483d-ae29-15c5b0a1beca {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.022551] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1251.022551] env[69796]: value = "task-4234519" [ 1251.022551] env[69796]: _type = "Task" [ 1251.022551] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.030704] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234519, 'name': Rename_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.532600] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234519, 'name': Rename_Task, 'duration_secs': 0.163771} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.532896] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Powering on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1251.533178] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f85a714a-24a2-4925-b1aa-1fc328f877c7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.541242] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1251.541242] env[69796]: value = "task-4234520" [ 1251.541242] env[69796]: _type = "Task" [ 1251.541242] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.551605] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234520, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.051985] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234520, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.552308] env[69796]: DEBUG oslo_vmware.api [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234520, 'name': PowerOnVM_Task, 'duration_secs': 0.917796} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.552586] env[69796]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Powered on the VM {{(pid=69796) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1252.552792] env[69796]: INFO nova.compute.manager [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Took 7.14 seconds to spawn the instance on the hypervisor. [ 1252.552974] env[69796]: DEBUG nova.compute.manager [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1252.553782] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a017e0b-1438-49b3-8dc8-b60eef745eb6 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.072381] env[69796]: INFO nova.compute.manager [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Took 11.92 seconds to build instance. 
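Annotation: the bracketed numbers at the start of each record are seconds of uptime, so the durations Nova reports can be re-derived from them. Using values copied from the surrounding records (a rough check, down to rounding):

```python
spawn_start   = 1245.415839  # "Start spawning the instance on the hypervisor."
powered_on    = 1252.552586  # "Powered on the VM"
lock_acquired = 1240.147021  # build_and_run_instance lock acquired
lock_released = 1253.575216  # lock released (first record below)

print(round(powered_on - spawn_start, 2))       # 7.14   -> "Took 7.14 seconds to spawn"
print(round(lock_released - lock_acquired, 3))  # 13.428 -> lock "held 13.428s"
```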
[ 1253.575216] env[69796]: DEBUG oslo_concurrency.lockutils [None req-d5203fed-57e5-4e26-8555-94994cfa25db tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "a6da1715-36d0-4837-90cd-f2d62afa1ced" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.428s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.296207] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b979042-d510-4eb1-b96f-da18653ae63b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.303546] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b3546546-e055-47fc-8fa3-6f12ce11012f tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Suspending the VM {{(pid=69796) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1254.303819] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-b2619ce2-3876-4d4c-ae4d-7b2379a5678f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.310653] env[69796]: DEBUG oslo_vmware.api [None req-b3546546-e055-47fc-8fa3-6f12ce11012f tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1254.310653] env[69796]: value = "task-4234521" [ 1254.310653] env[69796]: _type = "Task" [ 1254.310653] env[69796]: } to complete. {{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.318735] env[69796]: DEBUG oslo_vmware.api [None req-b3546546-e055-47fc-8fa3-6f12ce11012f tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234521, 'name': SuspendVM_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.820962] env[69796]: DEBUG oslo_vmware.api [None req-b3546546-e055-47fc-8fa3-6f12ce11012f tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234521, 'name': SuspendVM_Task} progress is 58%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.321684] env[69796]: DEBUG oslo_vmware.api [None req-b3546546-e055-47fc-8fa3-6f12ce11012f tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234521, 'name': SuspendVM_Task, 'duration_secs': 0.668876} completed successfully. 
{{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.322101] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b3546546-e055-47fc-8fa3-6f12ce11012f tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Suspended the VM {{(pid=69796) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1255.322187] env[69796]: DEBUG nova.compute.manager [None req-b3546546-e055-47fc-8fa3-6f12ce11012f tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Checking state {{(pid=69796) _get_power_state /opt/stack/nova/nova/compute/manager.py:1797}} [ 1255.322961] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39b08c6-71ea-402b-9b0d-1fe3d2d4b9b5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.664338] env[69796]: DEBUG oslo_concurrency.lockutils [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "a6da1715-36d0-4837-90cd-f2d62afa1ced" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1256.664799] env[69796]: DEBUG oslo_concurrency.lockutils [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "a6da1715-36d0-4837-90cd-f2d62afa1ced" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1256.665162] env[69796]: DEBUG oslo_concurrency.lockutils [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "a6da1715-36d0-4837-90cd-f2d62afa1ced-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1256.665462] env[69796]: DEBUG oslo_concurrency.lockutils [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "a6da1715-36d0-4837-90cd-f2d62afa1ced-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1256.665751] env[69796]: DEBUG oslo_concurrency.lockutils [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "a6da1715-36d0-4837-90cd-f2d62afa1ced-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.667978] env[69796]: INFO nova.compute.manager [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] 
[instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Terminating instance [ 1257.171579] env[69796]: DEBUG nova.compute.manager [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Start destroying the instance on the hypervisor. {{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1257.171850] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1257.172793] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ca3e2f-dcdd-4341-a00d-bd7b1c7fe29a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.180517] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Unregistering the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1257.180789] env[69796]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5d1f8a03-ac1c-46ab-9484-9225cd119a22 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.244994] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Unregistered the VM {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1257.245253] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Deleting contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1257.245477] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Deleting the datastore file [datastore2] a6da1715-36d0-4837-90cd-f2d62afa1ced {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1257.245761] env[69796]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-441b5d22-736e-4c2f-98de-85555315d1d5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.252740] env[69796]: DEBUG oslo_vmware.api [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for the task: (returnval){ [ 1257.252740] env[69796]: value = "task-4234523" [ 1257.252740] env[69796]: _type = "Task" [ 1257.252740] env[69796]: } to complete. 
{{(pid=69796) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.261097] env[69796]: DEBUG oslo_vmware.api [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234523, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.762395] env[69796]: DEBUG oslo_vmware.api [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Task: {'id': task-4234523, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186316} completed successfully. {{(pid=69796) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.762768] env[69796]: DEBUG nova.virt.vmwareapi.ds_util [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Deleted the datastore file {{(pid=69796) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1257.762971] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Deleted contents of the VM from datastore datastore2 {{(pid=69796) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1257.763116] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1257.763296] env[69796]: INFO nova.compute.manager [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1257.763539] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1257.763734] env[69796]: DEBUG nova.compute.manager [-] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1257.763833] env[69796]: DEBUG nova.network.neutron [-] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1258.013717] env[69796]: DEBUG nova.compute.manager [req-2731a5f5-0d91-454a-8211-abc52a2d92ee req-e2efd784-683e-4959-81fe-631c686d46df service nova] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Received event network-vif-deleted-1b0effd2-7bf0-4c21-b385-f4fd20590150 {{(pid=69796) external_instance_event /opt/stack/nova/nova/compute/manager.py:11768}} [ 1258.013978] env[69796]: INFO nova.compute.manager [req-2731a5f5-0d91-454a-8211-abc52a2d92ee req-e2efd784-683e-4959-81fe-631c686d46df service nova] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Neutron deleted interface 1b0effd2-7bf0-4c21-b385-f4fd20590150; detaching it from the instance and deleting it from the info cache [ 1258.014351] env[69796]: DEBUG nova.network.neutron [req-2731a5f5-0d91-454a-8211-abc52a2d92ee req-e2efd784-683e-4959-81fe-631c686d46df service nova] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.496463] env[69796]: DEBUG nova.network.neutron [-] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.517692] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec89e4b6-2cac-45f1-bb0a-5c5a636905a5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.527455] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fece79ba-a760-4b37-90e5-7fc9fed77952 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.553774] env[69796]: DEBUG nova.compute.manager [req-2731a5f5-0d91-454a-8211-abc52a2d92ee req-e2efd784-683e-4959-81fe-631c686d46df service nova] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Detach interface failed, port_id=1b0effd2-7bf0-4c21-b385-f4fd20590150, reason: Instance a6da1715-36d0-4837-90cd-f2d62afa1ced could not be found. {{(pid=69796) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11602}} [ 1258.999754] env[69796]: INFO nova.compute.manager [-] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Took 1.24 seconds to deallocate network for instance. 
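[editor's note] The records above show the datastore cleanup pattern used throughout this log: FileManager.DeleteDatastoreFile_Task is invoked through oslo.vmware, then the returned task is polled ("progress is 0%" ... "completed successfully") until it finishes. Below is a minimal sketch of that call-and-wait pattern with oslo.vmware; the vCenter host, credentials and datastore path are placeholders, not values from this log, and this illustrates the polling pattern rather than quoting Nova's ds_util code.

    # Sketch only: drive a vCenter task with oslo.vmware and block until it finishes.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'password',          # placeholders
        api_retry_count=3, task_poll_interval=0.5)

    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] some-instance-uuid',              # hypothetical datastore path
        datacenter=None)                                     # Nova passes the datacenter ref; omitted here

    # wait_for_task polls the task (the "progress is ..." lines above) and raises if it errors.
    session.wait_for_task(task)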
[ 1259.506342] env[69796]: DEBUG oslo_concurrency.lockutils [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1259.506637] env[69796]: DEBUG oslo_concurrency.lockutils [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.506868] env[69796]: DEBUG nova.objects.instance [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lazy-loading 'resources' on Instance uuid a6da1715-36d0-4837-90cd-f2d62afa1ced {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1260.171059] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fab53a-c48d-4dd5-89be-d1f544f20810 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.178774] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fe30c3-0712-48e7-8bb3-77253b367676 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.208929] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7adad58-37d9-4ff9-908a-f0c3e6c2e30b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.217047] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f14e1f-9f44-4ad3-9efd-e362eebcf2a9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.230287] env[69796]: DEBUG nova.compute.provider_tree [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1260.750282] env[69796]: ERROR nova.scheduler.client.report [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [req-10d047c4-501c-411b-8196-6301c21a6340] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-10d047c4-501c-411b-8196-6301c21a6340"}]} [ 1260.750620] env[69796]: DEBUG oslo_concurrency.lockutils [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.244s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.751316] env[69796]: ERROR nova.compute.manager [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1260.751316] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Traceback (most recent call last): [ 1260.751316] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1260.751316] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] yield [ 1260.751316] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1260.751316] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] self.set_inventory_for_provider( [ 1260.751316] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1260.751316] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1260.751620] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-10d047c4-501c-411b-8196-6301c21a6340"}]} [ 
1260.751620] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] [ 1260.751620] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] During handling of the above exception, another exception occurred: [ 1260.751620] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] [ 1260.751620] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Traceback (most recent call last): [ 1260.751620] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 1260.751620] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] self._delete_instance(context, instance, bdms) [ 1260.751620] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 1260.751620] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] self._complete_deletion(context, instance) [ 1260.751933] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 1260.751933] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] self._update_resource_tracker(context, instance) [ 1260.751933] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 1260.751933] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] self.rt.update_usage(context, instance, instance.node) [ 1260.751933] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1260.751933] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] return f(*args, **kwargs) [ 1260.751933] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 1260.751933] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] self._update(context.elevated(), self.compute_nodes[nodename]) [ 1260.751933] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1260.751933] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] self._update_to_placement(context, compute_node, startup) [ 1260.751933] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1260.751933] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1260.752318] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1260.752318] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] return 
attempt.get(self._wrap_exception) [ 1260.752318] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1260.752318] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] six.reraise(self.value[0], self.value[1], self.value[2]) [ 1260.752318] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1260.752318] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] raise value [ 1260.752318] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1260.752318] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1260.752318] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1260.752318] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] self.reportclient.update_from_provider_tree( [ 1260.752318] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1260.752318] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] with catch_all(pd.uuid): [ 1260.752318] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1260.752732] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] self.gen.throw(typ, value, traceback) [ 1260.752732] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1260.752732] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] raise exception.ResourceProviderSyncFailed() [ 1260.752732] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
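[editor's note] The 400 above is placement's own JSON-Schema validation of the PUT .../inventories payload: the compute host reports DISK_GB with max_unit 0, while the schema fragment quoted in the error requires max_unit to be an integer of at least 1. A minimal reproduction of just that failing constraint with the jsonschema library, using only the schema fragment and the DISK_GB record quoted in the error (the full placement schema has more fields):

    # Reproduce the failing constraint from the placement 400 above.
    import jsonschema

    # Schema fragment quoted in the error detail.
    max_unit_schema = {'type': 'integer', 'maximum': 2147483647, 'minimum': 1}

    disk_gb = {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0,
               'step_size': 1, 'allocation_ratio': 1.0}

    try:
        jsonschema.validate(disk_gb['max_unit'], max_unit_schema)
    except jsonschema.ValidationError as exc:
        # Prints: 0 is less than the minimum of 1 -- the same message placement returns.
        print(exc.message)

Because the host keeps reporting the same inventory, every retry of this payload fails the same way, which is why both the terminate here and the later build end in ResourceProviderSyncFailed.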
[ 1260.752732] env[69796]: ERROR nova.compute.manager [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] [ 1261.256089] env[69796]: DEBUG oslo_concurrency.lockutils [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "a6da1715-36d0-4837-90cd-f2d62afa1ced" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.591s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.512403] env[69796]: DEBUG oslo_concurrency.lockutils [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "97040b95-5602-4808-afe2-7d7a868b0d80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.512752] env[69796]: DEBUG oslo_concurrency.lockutils [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "97040b95-5602-4808-afe2-7d7a868b0d80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.769151] env[69796]: DEBUG oslo_concurrency.lockutils [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.769416] env[69796]: DEBUG oslo_concurrency.lockutils [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.769610] env[69796]: DEBUG oslo_concurrency.lockutils [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.769836] env[69796]: INFO nova.compute.manager [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Successfully reverted task state from None on failure for instance. [ 1262.773717] env[69796]: ERROR oslo_messaging.rpc.server [None req-856873a2-952c-40e5-a4a8-89d329b0eeb0 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 1262.773717] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1262.773717] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1262.773717] env[69796]: ERROR oslo_messaging.rpc.server yield [ 1262.773717] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1262.773717] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 1262.773717] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1262.773717] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1262.773717] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-10d047c4-501c-411b-8196-6301c21a6340"}]} [ 1262.773717] env[69796]: ERROR oslo_messaging.rpc.server [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server 
File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1262.774229] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1262.774867] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1262.775307] env[69796]: ERROR 
oslo_messaging.rpc.server raise self.value [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 1262.775307] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1262.775726] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 1262.776486] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1262.776486] env[69796]: ERROR oslo_messaging.rpc.server with 
catch_all(pd.uuid): [ 1262.776486] env[69796]: ERROR oslo_messaging.rpc.server File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1262.776486] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 1262.776486] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1262.776486] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 1262.776486] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1262.776486] env[69796]: ERROR oslo_messaging.rpc.server [ 1263.015245] env[69796]: DEBUG nova.compute.manager [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Starting instance... {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1263.538039] env[69796]: DEBUG oslo_concurrency.lockutils [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.538398] env[69796]: DEBUG oslo_concurrency.lockutils [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.540291] env[69796]: INFO nova.compute.claims [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1264.568904] env[69796]: DEBUG nova.scheduler.client.report [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1264.584358] env[69796]: DEBUG nova.scheduler.client.report [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1264.584605] env[69796]: DEBUG nova.compute.provider_tree [None req-29999bb7-d3a0-47ed-8975-df01186442fb 
tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1264.594977] env[69796]: DEBUG nova.scheduler.client.report [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1264.613539] env[69796]: DEBUG nova.scheduler.client.report [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1264.772702] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b6295f-7b2c-4744-b269-0d63b1778f42 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.780516] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f547b8-02b9-4a79-9ed8-1c3c74ad4087 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.811168] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db512db-6b33-43ef-9426-011ad24ed6a8 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.818820] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab21371d-da53-4cb5-a470-70a8dd785755 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.832080] env[69796]: DEBUG nova.compute.provider_tree [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1265.353432] env[69796]: ERROR nova.scheduler.client.report [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 
tempest-DeleteServersTestJSON-938478453-project-member] [req-bed73960-5503-4f97-bf67-6000b5acf148] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-bed73960-5503-4f97-bf67-6000b5acf148"}]} [ 1265.353769] env[69796]: DEBUG oslo_concurrency.lockutils [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.815s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.354369] env[69796]: ERROR nova.compute.manager [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 1265.354369] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Traceback (most recent call last): [ 1265.354369] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1265.354369] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] yield [ 1265.354369] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1265.354369] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] self.set_inventory_for_provider( [ 1265.354369] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1265.354369] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1265.354624] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-bed73960-5503-4f97-bf67-6000b5acf148"}]} [ 1265.354624] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] [ 1265.354624] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] During handling of the above exception, another exception occurred: [ 1265.354624] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] [ 1265.354624] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Traceback (most recent call last): [ 1265.354624] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 1265.354624] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] with self.rt.instance_claim(context, instance, node, allocs, [ 1265.354624] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1265.354624] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] return f(*args, **kwargs) [ 1265.354914] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 1265.354914] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] self._update(elevated, cn) [ 1265.354914] env[69796]: ERROR nova.compute.manager [instance: 
97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1265.354914] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] self._update_to_placement(context, compute_node, startup) [ 1265.354914] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1265.354914] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1265.354914] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1265.354914] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] return attempt.get(self._wrap_exception) [ 1265.354914] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1265.354914] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] six.reraise(self.value[0], self.value[1], self.value[2]) [ 1265.354914] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1265.354914] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] raise value [ 1265.354914] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1265.355321] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1265.355321] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1265.355321] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] self.reportclient.update_from_provider_tree( [ 1265.355321] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1265.355321] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] with catch_all(pd.uuid): [ 1265.355321] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1265.355321] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] self.gen.throw(typ, value, traceback) [ 1265.355321] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1265.355321] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] raise exception.ResourceProviderSyncFailed() [ 1265.355321] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
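[editor's note] Both tracebacks pass through retrying.py (wrapped_f / Retrying.call / six.reraise) on their way into update_from_provider_tree: ResourceTracker._update_to_placement is wrapped in the retrying library's decorator, so eligible failures can be re-attempted before the error propagates; which exceptions are actually retried is not visible in this log. A small sketch of that wrapping pattern with the retrying package follows; the attempt count and the retried exception type are assumptions for illustration, not the values Nova uses.

    # Sketch of the retry wrapper visible in the tracebacks above.
    import retrying


    class PlacementConflict(Exception):
        """Stand-in for a retryable placement error (assumption for this sketch)."""


    def _retry_on_conflict(exc):
        # Only re-attempt on the retryable error; anything else propagates immediately.
        return isinstance(exc, PlacementConflict)


    @retrying.retry(stop_max_attempt_number=4, retry_on_exception=_retry_on_conflict)
    def update_to_placement(payload):
        # In Nova this step is ResourceTracker._update_to_placement calling
        # SchedulerReportClient.update_from_provider_tree(...).
        raise PlacementConflict('placement rejected the update')


    # update_to_placement({'DISK_GB': {...}})  # would re-raise after the final attempt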
[ 1265.355321] env[69796]: ERROR nova.compute.manager [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] [ 1265.355639] env[69796]: DEBUG nova.compute.utils [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1265.356870] env[69796]: DEBUG nova.compute.manager [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Build of instance 97040b95-5602-4808-afe2-7d7a868b0d80 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1265.357318] env[69796]: DEBUG nova.compute.manager [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1265.357551] env[69796]: DEBUG oslo_concurrency.lockutils [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "refresh_cache-97040b95-5602-4808-afe2-7d7a868b0d80" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1265.357709] env[69796]: DEBUG oslo_concurrency.lockutils [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "refresh_cache-97040b95-5602-4808-afe2-7d7a868b0d80" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1265.357867] env[69796]: DEBUG nova.network.neutron [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1265.877171] env[69796]: DEBUG nova.network.neutron [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1265.958584] env[69796]: DEBUG nova.network.neutron [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1266.461138] env[69796]: DEBUG oslo_concurrency.lockutils [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "refresh_cache-97040b95-5602-4808-afe2-7d7a868b0d80" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1266.461381] env[69796]: DEBUG nova.compute.manager [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1266.461540] env[69796]: DEBUG nova.compute.manager [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1266.461712] env[69796]: DEBUG nova.network.neutron [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1266.477754] env[69796]: DEBUG nova.network.neutron [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1266.980161] env[69796]: DEBUG nova.network.neutron [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1267.483402] env[69796]: INFO nova.compute.manager [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: 97040b95-5602-4808-afe2-7d7a868b0d80] Took 1.02 seconds to deallocate network for instance. 
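[editor's note] The inventory the host keeps pushing (DISK_GB total 400, max_unit 0) is the common root of both failures, while the refresh at 1264.584358 shows placement still holding max_unit 1 for DISK_GB. One defensive pattern is to clamp each resource class's max_unit to at least 1 before handing the tree to placement; the helper below is purely hypothetical and only illustrates the schema invariant, it is not the fix Nova applies.

    # Hypothetical guard: enforce the placement invariant max_unit >= 1
    # on an inventory dict shaped like the ones logged above.
    def clamp_max_units(inventories):
        """Return a copy of the inventory dict with every max_unit raised to >= 1."""
        fixed = {}
        for rc, inv in inventories.items():
            inv = dict(inv)
            if inv.get('max_unit', 1) < 1:
                inv['max_unit'] = 1
            fixed[rc] = inv
        return fixed


    inventory = {
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }
    print(clamp_max_units(inventory)['DISK_GB']['max_unit'])  # -> 1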
[ 1268.354017] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1268.354404] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Cleaning up deleted instances {{(pid=69796) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11865}} [ 1268.515079] env[69796]: INFO nova.scheduler.client.report [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Deleted allocations for instance 97040b95-5602-4808-afe2-7d7a868b0d80 [ 1268.858342] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] There are 3 instances to clean {{(pid=69796) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11874}} [ 1268.858582] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [instance: 9af7f7df-6660-4770-b05a-4d6cc48d161a] Instance has had 0 of 5 cleanup attempts {{(pid=69796) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1269.023115] env[69796]: DEBUG oslo_concurrency.lockutils [None req-29999bb7-d3a0-47ed-8975-df01186442fb tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "97040b95-5602-4808-afe2-7d7a868b0d80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 6.510s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.363295] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [instance: 7bb6fade-ece1-447e-8261-4b7f96c35479] Instance has had 0 of 5 cleanup attempts {{(pid=69796) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1269.867026] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [instance: 78da661c-9020-40d1-b2e7-bc844c0bdbb0] Instance has had 0 of 5 cleanup attempts {{(pid=69796) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11878}} [ 1270.129551] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "a6da1715-36d0-4837-90cd-f2d62afa1ced" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1270.129826] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "a6da1715-36d0-4837-90cd-f2d62afa1ced" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1270.130049] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "a6da1715-36d0-4837-90cd-f2d62afa1ced-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1270.130253] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "a6da1715-36d0-4837-90cd-f2d62afa1ced-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1270.130429] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "a6da1715-36d0-4837-90cd-f2d62afa1ced-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1270.133177] env[69796]: INFO nova.compute.manager [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Terminating instance [ 1270.636841] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "refresh_cache-a6da1715-36d0-4837-90cd-f2d62afa1ced" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.637312] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "refresh_cache-a6da1715-36d0-4837-90cd-f2d62afa1ced" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.637312] env[69796]: DEBUG nova.network.neutron [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1271.159971] env[69796]: DEBUG nova.network.neutron [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1271.239676] env[69796]: DEBUG nova.network.neutron [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.743065] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "refresh_cache-a6da1715-36d0-4837-90cd-f2d62afa1ced" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1271.743511] env[69796]: DEBUG nova.compute.manager [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Start destroying the instance on the hypervisor. {{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1271.743714] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1271.744060] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45263d84-e0df-4688-9ffb-a1bc8739169c {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.753546] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedaf13c-735d-4e93-ab5b-9e9220801f25 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.776247] env[69796]: WARNING nova.virt.vmwareapi.vmops [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a6da1715-36d0-4837-90cd-f2d62afa1ced could not be found. [ 1271.776247] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1271.776454] env[69796]: INFO nova.compute.manager [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1271.776688] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1271.776905] env[69796]: DEBUG nova.compute.manager [-] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1271.776997] env[69796]: DEBUG nova.network.neutron [-] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1271.792714] env[69796]: DEBUG nova.network.neutron [-] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1272.295660] env[69796]: DEBUG nova.network.neutron [-] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.798794] env[69796]: INFO nova.compute.manager [-] [instance: a6da1715-36d0-4837-90cd-f2d62afa1ced] Took 1.02 seconds to deallocate network for instance. [ 1273.306025] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.306299] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.306502] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.327970] env[69796]: INFO nova.scheduler.client.report [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Deleted allocations for instance a6da1715-36d0-4837-90cd-f2d62afa1ced [ 1273.835974] env[69796]: DEBUG oslo_concurrency.lockutils [None req-ca7f1315-76f4-48c8-aa5f-d7d077b35764 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "a6da1715-36d0-4837-90cd-f2d62afa1ced" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.705s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.369931] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.370156] env[69796]: DEBUG oslo_service.periodic_task [None 
req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.370316] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69796) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1274.563301] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.563550] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1274.563778] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.564054] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1274.564272] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.566398] env[69796]: INFO nova.compute.manager [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Terminating instance [ 1275.070173] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "refresh_cache-d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.070577] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 
tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquired lock "refresh_cache-d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1275.070577] env[69796]: DEBUG nova.network.neutron [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1275.354323] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.354548] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.591919] env[69796]: DEBUG nova.network.neutron [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1275.661562] env[69796]: DEBUG nova.network.neutron [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.164091] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Releasing lock "refresh_cache-d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1276.164779] env[69796]: DEBUG nova.compute.manager [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Start destroying the instance on the hypervisor. 
{{(pid=69796) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1276.165157] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5054b767-de0a-4851-95a1-7e0395b8b113 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.175417] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534e47ed-709f-4dc1-aa1c-ff39c1f531fd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.198951] env[69796]: WARNING nova.virt.vmwareapi.driver [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 could not be found. [ 1276.199187] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Destroying instance {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1276.199482] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-083aed51-95d7-418e-9aa5-27d1020cf752 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.208368] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11098a6-0361-4a5b-9c7f-da2d77031504 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.231296] env[69796]: WARNING nova.virt.vmwareapi.vmops [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 could not be found. [ 1276.231513] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Instance destroyed {{(pid=69796) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1276.231698] env[69796]: INFO nova.compute.manager [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Took 0.07 seconds to destroy the instance on the hypervisor. [ 1276.231961] env[69796]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69796) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1276.232214] env[69796]: DEBUG nova.compute.manager [-] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1276.232310] env[69796]: DEBUG nova.network.neutron [-] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1276.248778] env[69796]: DEBUG nova.network.neutron [-] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1276.349354] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.353157] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager.update_available_resource {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1276.751459] env[69796]: DEBUG nova.network.neutron [-] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.856410] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.856699] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.856873] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.857043] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69796) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1276.857987] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a0acc6-ba0c-4699-a0fb-f0f6164529de {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.866875] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29ca6f5-448e-4073-98b5-071a63d734f2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1276.880831] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aef71c37-a382-4063-bdaa-3fd262008fe0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.887756] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd01e832-4cb0-4916-9ba5-c147e66d6f29 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.917317] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180733MB free_disk=0GB free_vcpus=48 pci_devices=None {{(pid=69796) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1276.917492] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.917726] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1277.254068] env[69796]: INFO nova.compute.manager [-] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Took 1.02 seconds to deallocate network for instance. [ 1277.267190] env[69796]: WARNING nova.volume.cinder [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Attachment c3e5ff1a-f5e7-46ac-a426-94030eb0813e does not exist. Ignoring.: cinderclient.exceptions.NotFound: Volume attachment could not be found with filter: attachment_id = c3e5ff1a-f5e7-46ac-a426-94030eb0813e. (HTTP 404) (Request-ID: req-3cdd7a74-07b0-4fc5-92ab-d0969df7f0a4) [ 1277.267472] env[69796]: INFO nova.compute.manager [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Took 0.01 seconds to detach 1 volumes for instance. [ 1277.773677] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1277.952683] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47f223c0-12b0-4eda-ab42-81fe8b95afac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1277.952884] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 38792225-b054-4c08-b3ec-51d46287b0f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1277.953032] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47005af8-11fe-498f-9b67-e0316faeeb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1277.953163] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 836605ee-50cb-48b0-ba2e-33db3832f8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1277.953282] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance a4a16667-cd00-4850-9389-0bd57c7efd74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1277.953498] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 7f37f6c9-adba-4292-9d47-c455f77e539f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1277.953579] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 8b103adc-9903-406f-8fd1-e193e00cde11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1277.953617] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1277.953740] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 3020e505-513b-4b29-996a-6e70a212f508 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1277.953855] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance f0d4f167-344a-4828-9f6e-8a62ed8e064d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1277.953969] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1277.954129] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1277.954251] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 983f57b3-3bfb-41ce-a924-d48c72d25c9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1277.954455] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1277.954591] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1277.972448] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1277.986117] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1277.986368] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating inventory in ProviderTree for provider 
dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1277.997256] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1278.015761] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1278.205119] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86c84d7-f41a-44d1-802e-9e34731cc1ef {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.213177] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19580cf-9cec-4725-943b-fa05c107f285 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.243577] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a39d7797-a633-40e0-ab94-3879d6ee7aca {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.251875] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335ba15d-1478-4245-9ad8-a1dbee564d4b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.265632] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1278.898523] env[69796]: ERROR nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [req-39cf0474-03f0-4a4e-b121-235f01dcd91c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider 
with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-39cf0474-03f0-4a4e-b121-235f01dcd91c"}]} [ 1278.898828] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.981s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1278.899491] env[69796]: ERROR nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Error updating resources for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1278.899491] env[69796]: ERROR nova.compute.manager Traceback (most recent call last): [ 1278.899491] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1278.899491] env[69796]: ERROR nova.compute.manager yield [ 1278.899491] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1278.899491] env[69796]: ERROR nova.compute.manager self.set_inventory_for_provider( [ 1278.899491] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1278.899491] env[69796]: ERROR nova.compute.manager raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1278.899491] env[69796]: ERROR nova.compute.manager nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-39cf0474-03f0-4a4e-b121-235f01dcd91c"}]} [ 1278.899491] env[69796]: ERROR nova.compute.manager [ 1278.899491] env[69796]: ERROR nova.compute.manager During handling of the above exception, another exception occurred: [ 1278.899491] env[69796]: ERROR nova.compute.manager [ 1278.900083] env[69796]: ERROR nova.compute.manager Traceback (most recent call last): [ 1278.900083] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 11220, in _update_available_resource_for_node [ 1278.900083] env[69796]: ERROR nova.compute.manager self.rt.update_available_resource(context, nodename, [ 1278.900083] env[69796]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/compute/resource_tracker.py", line 965, in update_available_resource [ 1278.900083] env[69796]: ERROR nova.compute.manager self._update_available_resource(context, resources, startup=startup) [ 1278.900083] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1278.900083] env[69796]: ERROR nova.compute.manager return f(*args, **kwargs) [ 1278.900083] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1096, in _update_available_resource [ 1278.900083] env[69796]: ERROR nova.compute.manager self._update(context, cn, startup=startup) [ 1278.900083] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1278.900083] env[69796]: ERROR nova.compute.manager self._update_to_placement(context, compute_node, startup) [ 1278.900083] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1278.900083] env[69796]: ERROR nova.compute.manager return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1278.900083] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1278.900083] env[69796]: ERROR nova.compute.manager return attempt.get(self._wrap_exception) [ 1278.900083] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1278.900083] env[69796]: ERROR nova.compute.manager six.reraise(self.value[0], self.value[1], self.value[2]) [ 1278.900083] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1278.900514] env[69796]: ERROR nova.compute.manager raise value [ 1278.900514] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1278.900514] env[69796]: ERROR nova.compute.manager attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1278.900514] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1278.900514] env[69796]: ERROR nova.compute.manager self.reportclient.update_from_provider_tree( [ 1278.900514] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1278.900514] env[69796]: ERROR nova.compute.manager with catch_all(pd.uuid): [ 1278.900514] env[69796]: ERROR nova.compute.manager File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1278.900514] env[69796]: ERROR nova.compute.manager self.gen.throw(typ, value, traceback) [ 1278.900514] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1278.900514] env[69796]: ERROR nova.compute.manager raise exception.ResourceProviderSyncFailed() [ 1278.900514] env[69796]: ERROR nova.compute.manager nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 1278.900514] env[69796]: ERROR nova.compute.manager [ 1278.900514] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.127s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1278.900857] env[69796]: DEBUG nova.objects.instance [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lazy-loading 'resources' on Instance uuid d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 {{(pid=69796) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1278.901166] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1279.421024] env[69796]: DEBUG nova.scheduler.client.report [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1279.437063] env[69796]: DEBUG nova.scheduler.client.report [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1279.437309] env[69796]: DEBUG nova.compute.provider_tree [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1279.449480] env[69796]: DEBUG nova.scheduler.client.report [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1279.469621] env[69796]: DEBUG nova.scheduler.client.report [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 
tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1279.655140] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628241cb-7e3c-4417-a57c-744223950726 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.664147] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a4d608-2b66-4ed9-b1dd-18751d58789d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.697667] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d766bfe-5be8-46ca-bac2-57c68b045ee3 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.706470] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a373d2-a42c-4494-b72b-dca77354dcac {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.721512] env[69796]: DEBUG nova.compute.provider_tree [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1280.084255] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "e870394f-cddd-4836-98f0-31d79c839f1c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.084528] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "e870394f-cddd-4836-98f0-31d79c839f1c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.245391] env[69796]: ERROR nova.scheduler.client.report [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [req-10eed689-8541-4be4-aec2-e40631da07a3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 
4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-10eed689-8541-4be4-aec2-e40631da07a3"}]} [ 1280.246032] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.346s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.246378] env[69796]: ERROR nova.compute.manager [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Setting instance vm_state to ERROR: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1280.246378] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Traceback (most recent call last): [ 1280.246378] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1280.246378] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] yield [ 1280.246378] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1280.246378] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self.set_inventory_for_provider( [ 1280.246378] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1280.246378] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1280.246645] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On 
instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-10eed689-8541-4be4-aec2-e40631da07a3"}]} [ 1280.246645] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] [ 1280.246645] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] During handling of the above exception, another exception occurred: [ 1280.246645] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] [ 1280.246645] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Traceback (most recent call last): [ 1280.246645] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 1280.246645] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self._delete_instance(context, instance, bdms) [ 1280.246645] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 1280.246645] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self._complete_deletion(context, instance) [ 1280.246901] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 1280.246901] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self._update_resource_tracker(context, instance) [ 1280.246901] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 1280.246901] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self.rt.update_usage(context, instance, instance.node) [ 1280.246901] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1280.246901] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] return f(*args, **kwargs) [ 1280.246901] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 1280.246901] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self._update(context.elevated(), self.compute_nodes[nodename]) [ 1280.246901] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1280.246901] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self._update_to_placement(context, compute_node, startup) [ 1280.246901] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1280.246901] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1280.247214] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1280.247214] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] return attempt.get(self._wrap_exception) [ 1280.247214] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1280.247214] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] six.reraise(self.value[0], self.value[1], self.value[2]) [ 1280.247214] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1280.247214] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] raise value [ 1280.247214] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1280.247214] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1280.247214] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1280.247214] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self.reportclient.update_from_provider_tree( [ 1280.247214] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1280.247214] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] with catch_all(pd.uuid): [ 1280.247214] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1280.247637] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] self.gen.throw(typ, value, traceback) [ 1280.247637] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1280.247637] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] raise exception.ResourceProviderSyncFailed() [ 1280.247637] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1280.247637] env[69796]: ERROR nova.compute.manager [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] [ 1280.587719] env[69796]: DEBUG nova.compute.manager [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1280.751438] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.187s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.860591] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.860591] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.860591] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.860591] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Cleaning up deleted instances with incomplete migration {{(pid=69796) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11903}} [ 1281.113868] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1281.113868] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.114419] env[69796]: INFO nova.compute.claims [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1282.138474] env[69796]: DEBUG nova.scheduler.client.report [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1282.151720] env[69796]: DEBUG nova.scheduler.client.report [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1282.151930] env[69796]: DEBUG nova.compute.provider_tree [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1282.163832] env[69796]: DEBUG nova.scheduler.client.report [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1282.188283] env[69796]: DEBUG nova.scheduler.client.report [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1282.262375] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1282.358414] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52bdacf3-6273-4f0c-80be-2725b6909fcb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.366592] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564fa31f-583c-4954-bdb4-0c175f28744d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.396301] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2f2e92-d405-4da6-9c37-7aba21ae8fff {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.404263] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833c239a-b313-4cb4-8676-efa4c610ee4f {{(pid=69796) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.419743] env[69796]: DEBUG nova.compute.provider_tree [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1282.942625] env[69796]: ERROR nova.scheduler.client.report [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [req-9338526d-b380-49f5-a9f6-87cc8a045035] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9338526d-b380-49f5-a9f6-87cc8a045035"}]} [ 1282.943081] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.830s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.944144] env[69796]: ERROR nova.compute.manager [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
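The 400 quoted in the preceding entry is Placement's JSON-Schema validation of the PUT /resource_providers/{uuid}/inventories payload rejecting the DISK_GB record because max_unit is 0 while the schema requires a minimum of 1. The snippet below is a minimal, self-contained sketch that reproduces the same check locally with the jsonschema library; the schema fragment is lifted from the error detail above and the payload mirrors the inventory the compute host reported. It is an illustration only, not Placement's actual validation code.

```python
# Reproduce the schema rejection quoted in the 400 response above.
# Schema fragment copied from the error detail; payload mirrors the DISK_GB
# record the compute host sent (max_unit == 0).
import jsonschema

INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "maximum": 2147483647,
                            "minimum": 1,
                        },
                    },
                },
            },
        },
    },
}

payload = {
    "inventories": {
        "DISK_GB": {
            "total": 400,
            "reserved": 0,
            "min_unit": 1,
            "max_unit": 0,  # rejected: less than the schema minimum of 1
            "step_size": 1,
            "allocation_ratio": 1.0,
        },
    },
}

try:
    jsonschema.validate(instance=payload, schema=INVENTORY_SCHEMA)
except jsonschema.exceptions.ValidationError as err:
    # Prints: 0 is less than the minimum of 1
    print(err.message)
```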
[ 1282.944144] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Traceback (most recent call last): [ 1282.944144] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1282.944144] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] yield [ 1282.944144] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1282.944144] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] self.set_inventory_for_provider( [ 1282.944144] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1282.944144] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1282.944398] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-9338526d-b380-49f5-a9f6-87cc8a045035"}]} [ 1282.944398] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] [ 1282.944398] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] During handling of the above exception, another exception occurred: [ 1282.944398] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] [ 1282.944398] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Traceback (most recent call last): [ 1282.944398] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 1282.944398] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] with self.rt.instance_claim(context, instance, node, allocs, [ 1282.944398] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1282.944398] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] return f(*args, **kwargs) [ 1282.944661] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 1282.944661] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] self._update(elevated, cn) [ 1282.944661] env[69796]: ERROR nova.compute.manager [instance: 
e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1282.944661] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] self._update_to_placement(context, compute_node, startup) [ 1282.944661] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1282.944661] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1282.944661] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1282.944661] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] return attempt.get(self._wrap_exception) [ 1282.944661] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1282.944661] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] six.reraise(self.value[0], self.value[1], self.value[2]) [ 1282.944661] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1282.944661] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] raise value [ 1282.944661] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1282.944990] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1282.944990] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1282.944990] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] self.reportclient.update_from_provider_tree( [ 1282.944990] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1282.944990] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] with catch_all(pd.uuid): [ 1282.944990] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1282.944990] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] self.gen.throw(typ, value, traceback) [ 1282.944990] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1282.944990] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] raise exception.ResourceProviderSyncFailed() [ 1282.944990] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
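The traceback that just ended shows the report client's catch_all context manager converting the detailed ResourceProviderUpdateFailed into the generic ResourceProviderSyncFailed that the compute manager then acts on (the contextlib frames re-throw the original exception into the generator, which raises the new one). Below is a simplified sketch of that exception-translation pattern; the exception classes and helper are stand-ins written for this illustration, not Nova's real implementation.

```python
# Simplified sketch of the translation pattern visible in the traceback:
# a contextmanager catches the provider-specific failure and raises one
# generic "sync failed" error instead. Class names are stand-ins.
import contextlib


class ResourceProviderUpdateFailed(Exception):
    """Raised when one PUT to Placement is rejected (e.g. the 400 above)."""


class ResourceProviderSyncFailed(Exception):
    """Generic error surfaced to callers of the provider-tree update."""


@contextlib.contextmanager
def catch_all(provider_uuid):
    try:
        yield
    except ResourceProviderUpdateFailed as err:
        # Report the detailed failure, then hide it behind the generic
        # exception, mirroring the behaviour shown in the traceback.
        print(f"provider {provider_uuid} failed to sync: {err}")
        raise ResourceProviderSyncFailed() from err


def update_from_provider_tree():
    with catch_all("dc1d576d-f9a3-4db7-b636-fdf2129d2ab3"):
        raise ResourceProviderUpdateFailed("DISK_GB max_unit 0 rejected")


if __name__ == "__main__":
    try:
        update_from_provider_tree()
    except ResourceProviderSyncFailed:
        print("the compute manager sees only the generic sync failure")
```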
[ 1282.944990] env[69796]: ERROR nova.compute.manager [instance: e870394f-cddd-4836-98f0-31d79c839f1c] [ 1282.945324] env[69796]: DEBUG nova.compute.utils [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1282.946517] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.684s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1282.946812] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.947047] env[69796]: INFO nova.compute.manager [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] [instance: d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7] Successfully reverted task state from None on failure for instance. [ 1282.949674] env[69796]: DEBUG nova.compute.manager [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Build of instance e870394f-cddd-4836-98f0-31d79c839f1c was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1282.950159] env[69796]: DEBUG nova.compute.manager [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1282.950439] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "refresh_cache-e870394f-cddd-4836-98f0-31d79c839f1c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.950620] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquired lock "refresh_cache-e870394f-cddd-4836-98f0-31d79c839f1c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1282.950814] env[69796]: DEBUG nova.network.neutron [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1282.952499] env[69796]: ERROR oslo_messaging.rpc.server [None req-b1247931-b7a5-4491-83ea-8d2f50a5aa07 tempest-DeleteServersTestJSON-938478453 tempest-DeleteServersTestJSON-938478453-project-member] Exception during message handling: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
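The retrying.py and six.reraise frames in the tracebacks above show the placement update wrapped in the legacy retrying library: when every attempt fails the same way, the last exception is re-raised unchanged to the caller, so a persistent schema rejection keeps surfacing no matter how often it is retried. A minimal sketch of that behaviour follows; the retry parameters and exception class are illustrative assumptions, not Nova's actual retry configuration.

```python
# Minimal sketch of the retry-then-reraise behaviour seen in the retrying.py /
# six.reraise frames above. Parameters are illustrative assumptions only.
from retrying import retry


class PlacementBadRequest(Exception):
    """Stand-in for the schema-rejected PUT (HTTP 400)."""


@retry(retry_on_exception=lambda exc: isinstance(exc, PlacementBadRequest),
       stop_max_attempt_number=3,
       wait_fixed=100)
def update_to_placement():
    # Every attempt is rejected the same way, so retrying cannot help.
    raise PlacementBadRequest("DISK_GB max_unit: 0 is less than the minimum of 1")


if __name__ == "__main__":
    try:
        update_to_placement()
    except PlacementBadRequest as exc:
        # After the final attempt the original exception is re-raised.
        print(f"gave up after 3 attempts: {exc}")
```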
[ 1282.952499] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1282.952499] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1282.952499] env[69796]: ERROR oslo_messaging.rpc.server yield [ 1282.952499] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1282.952499] env[69796]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 1282.952499] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1282.952499] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1282.952499] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-10eed689-8541-4be4-aec2-e40631da07a3"}]} [ 1282.952499] env[69796]: ERROR oslo_messaging.rpc.server [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server 
File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1282.952859] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 167, in decorated_function [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 158, in decorated_function [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 214, in decorated_function [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server raise self.value [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 204, in decorated_function [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1282.953450] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1282.954055] env[69796]: ERROR 
oslo_messaging.rpc.server raise self.value [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 929, in _complete_deletion [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 695, in _update_resource_tracker [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 1282.954055] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 732, in update_usage [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server return attempt.get(self._wrap_exception) [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server raise value [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1282.954556] env[69796]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 1282.955076] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1282.955076] env[69796]: ERROR oslo_messaging.rpc.server with 
catch_all(pd.uuid): [ 1282.955076] env[69796]: ERROR oslo_messaging.rpc.server File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1282.955076] env[69796]: ERROR oslo_messaging.rpc.server self.gen.throw(typ, value, traceback) [ 1282.955076] env[69796]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1282.955076] env[69796]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderSyncFailed() [ 1282.955076] env[69796]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1282.955076] env[69796]: ERROR oslo_messaging.rpc.server [ 1283.470646] env[69796]: DEBUG nova.network.neutron [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1283.535467] env[69796]: DEBUG nova.network.neutron [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.852145] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1284.037700] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Releasing lock "refresh_cache-e870394f-cddd-4836-98f0-31d79c839f1c" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1284.037979] env[69796]: DEBUG nova.compute.manager [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1284.038182] env[69796]: DEBUG nova.compute.manager [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1284.038358] env[69796]: DEBUG nova.network.neutron [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1284.053613] env[69796]: DEBUG nova.network.neutron [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Instance cache missing network info. 
{{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1284.556366] env[69796]: DEBUG nova.network.neutron [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.059036] env[69796]: INFO nova.compute.manager [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: e870394f-cddd-4836-98f0-31d79c839f1c] Took 1.02 seconds to deallocate network for instance. [ 1286.086216] env[69796]: INFO nova.scheduler.client.report [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Deleted allocations for instance e870394f-cddd-4836-98f0-31d79c839f1c [ 1286.595739] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8f60ff20-d92b-44fa-8996-b55b4965e66d tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "e870394f-cddd-4836-98f0-31d79c839f1c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 6.511s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.397714] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "10def6cc-d01e-4438-bb94-f41717e24b9f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.398070] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "10def6cc-d01e-4438-bb94-f41717e24b9f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.900164] env[69796]: DEBUG nova.compute.manager [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1289.423228] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.423503] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1289.425058] env[69796]: INFO nova.compute.claims [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1290.449780] env[69796]: DEBUG nova.scheduler.client.report [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1290.463396] env[69796]: DEBUG nova.scheduler.client.report [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1290.463620] env[69796]: DEBUG nova.compute.provider_tree [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1290.474402] env[69796]: DEBUG nova.scheduler.client.report [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1290.493351] env[69796]: DEBUG nova.scheduler.client.report [None 
req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1290.656940] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91185caf-f71a-4910-9460-5a520cd3e1b9 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.665261] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e70bd8-8801-4bac-993e-4b8057c62f2f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.695182] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130e39ac-a8c0-4eeb-8e20-ef9a1ba2027e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.703090] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82191590-1473-484a-89c7-a4c4d71e2ccc {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.717223] env[69796]: DEBUG nova.compute.provider_tree [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1291.237828] env[69796]: ERROR nova.scheduler.client.report [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [req-4c6689fd-71ed-4673-b958-32e8576de87c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4c6689fd-71ed-4673-b958-32e8576de87c"}]} [ 1291.238207] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.815s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1291.239170] env[69796]: ERROR nova.compute.manager [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1291.239170] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Traceback (most recent call last): [ 1291.239170] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1291.239170] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] yield [ 1291.239170] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1291.239170] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] self.set_inventory_for_provider( [ 1291.239170] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1291.239170] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1291.239534] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-4c6689fd-71ed-4673-b958-32e8576de87c"}]} [ 1291.239534] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] [ 1291.239534] env[69796]: ERROR nova.compute.manager [instance: 
10def6cc-d01e-4438-bb94-f41717e24b9f] During handling of the above exception, another exception occurred: [ 1291.239534] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] [ 1291.239534] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Traceback (most recent call last): [ 1291.239534] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 1291.239534] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] with self.rt.instance_claim(context, instance, node, allocs, [ 1291.239534] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1291.239534] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] return f(*args, **kwargs) [ 1291.239890] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 1291.239890] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] self._update(elevated, cn) [ 1291.239890] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1291.239890] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] self._update_to_placement(context, compute_node, startup) [ 1291.239890] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1291.239890] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1291.239890] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1291.239890] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] return attempt.get(self._wrap_exception) [ 1291.239890] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1291.239890] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 1291.239890] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1291.239890] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] raise value [ 1291.239890] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1291.240376] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1291.240376] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/nova/nova/compute/resource_tracker.py", 
line 1390, in _update_to_placement [ 1291.240376] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] self.reportclient.update_from_provider_tree( [ 1291.240376] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1291.240376] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] with catch_all(pd.uuid): [ 1291.240376] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1291.240376] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] self.gen.throw(typ, value, traceback) [ 1291.240376] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1291.240376] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] raise exception.ResourceProviderSyncFailed() [ 1291.240376] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1291.240376] env[69796]: ERROR nova.compute.manager [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] [ 1291.241988] env[69796]: DEBUG nova.compute.utils [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1291.241988] env[69796]: DEBUG nova.compute.manager [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Build of instance 10def6cc-d01e-4438-bb94-f41717e24b9f was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1291.242163] env[69796]: DEBUG nova.compute.manager [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1291.242380] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "refresh_cache-10def6cc-d01e-4438-bb94-f41717e24b9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1291.242528] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquired lock "refresh_cache-10def6cc-d01e-4438-bb94-f41717e24b9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1291.242689] env[69796]: DEBUG nova.network.neutron [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1291.764628] env[69796]: DEBUG nova.network.neutron [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1291.849422] env[69796]: DEBUG nova.network.neutron [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1292.352600] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Releasing lock "refresh_cache-10def6cc-d01e-4438-bb94-f41717e24b9f" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1292.352863] env[69796]: DEBUG nova.compute.manager [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1292.353077] env[69796]: DEBUG nova.compute.manager [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1292.353258] env[69796]: DEBUG nova.network.neutron [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1292.368943] env[69796]: DEBUG nova.network.neutron [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1292.872290] env[69796]: DEBUG nova.network.neutron [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1293.374831] env[69796]: INFO nova.compute.manager [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 10def6cc-d01e-4438-bb94-f41717e24b9f] Took 1.02 seconds to deallocate network for instance. 
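The sequence above (claim, failed placement sync, re-schedule, VIF/network cleanup) repeats for every instance in this run. The traceback shows the mechanism: report.py wraps each provider update in a catch_all() context manager that converts a per-provider ResourceProviderUpdateFailed into a single ResourceProviderSyncFailed, which the compute manager then treats as grounds to re-schedule the build. The following is a minimal sketch of that pattern, using stand-in exception classes rather than Nova's actual code:

import contextlib
import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)


class ResourceProviderUpdateFailed(Exception):
    """Stand-in for nova.exception.ResourceProviderUpdateFailed."""


class ResourceProviderSyncFailed(Exception):
    """Stand-in for nova.exception.ResourceProviderSyncFailed."""


@contextlib.contextmanager
def catch_all(rp_uuid):
    # Yield to the caller; if the update inside the block fails, log it
    # against the provider UUID and raise the single "sync failed" error
    # that the resource tracker and compute manager react to.
    try:
        yield
    except ResourceProviderUpdateFailed:
        LOG.exception("Failed to sync provider %s with placement", rp_uuid)
        raise ResourceProviderSyncFailed(
            "Failed to synchronize the placement service with resource "
            "provider information supplied by the compute host.")


def update_from_provider_tree(rp_uuid, inventories):
    with catch_all(rp_uuid):
        # Stands in for set_inventory_for_provider(); pretend placement
        # rejected the payload with HTTP 400, as in the log above.
        raise ResourceProviderUpdateFailed("HTTP 400 from placement")


try:
    update_from_provider_tree("dc1d576d-f9a3-4db7-b636-fdf2129d2ab3",
                              {"DISK_GB": {"max_unit": 0}})
except ResourceProviderSyncFailed as exc:
    print(exc)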
[ 1294.406954] env[69796]: INFO nova.scheduler.client.report [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Deleted allocations for instance 10def6cc-d01e-4438-bb94-f41717e24b9f [ 1294.917772] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8e31061d-b902-4ced-9c48-f204c26d620f tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "10def6cc-d01e-4438-bb94-f41717e24b9f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 6.520s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.604820] env[69796]: DEBUG oslo_concurrency.lockutils [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "c8c7e784-431f-4a77-aac5-f2c506b18f66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.605164] env[69796]: DEBUG oslo_concurrency.lockutils [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "c8c7e784-431f-4a77-aac5-f2c506b18f66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.107721] env[69796]: DEBUG nova.compute.manager [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1297.626134] env[69796]: DEBUG oslo_concurrency.lockutils [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.626447] env[69796]: DEBUG oslo_concurrency.lockutils [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.628065] env[69796]: INFO nova.compute.claims [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1298.652519] env[69796]: DEBUG nova.scheduler.client.report [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1298.664728] env[69796]: DEBUG nova.scheduler.client.report [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1298.664950] env[69796]: DEBUG nova.compute.provider_tree [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1298.676896] env[69796]: DEBUG nova.scheduler.client.report [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1298.693183] env[69796]: DEBUG nova.scheduler.client.report [None 
req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1298.841242] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d396b5af-4963-4fbd-8499-7414eba5d58a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.849653] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d03445c-1601-4ad0-8137-10e2ed23dd22 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.878805] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f00ad2-1479-4956-a770-55f426249431 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.886400] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1815771-17e3-40ae-adbb-1c281c7a8bec {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.899857] env[69796]: DEBUG nova.compute.provider_tree [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1299.421079] env[69796]: ERROR nova.scheduler.client.report [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [req-1289bb03-9e87-4ddb-b5cc-f569e2c4cca1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1289bb03-9e87-4ddb-b5cc-f569e2c4cca1"}]} [ 1299.421512] env[69796]: DEBUG oslo_concurrency.lockutils [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.795s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.422125] env[69796]: ERROR nova.compute.manager [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1299.422125] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Traceback (most recent call last): [ 1299.422125] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1299.422125] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] yield [ 1299.422125] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1299.422125] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] self.set_inventory_for_provider( [ 1299.422125] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1299.422125] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1299.422506] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-1289bb03-9e87-4ddb-b5cc-f569e2c4cca1"}]} [ 1299.422506] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] [ 1299.422506] env[69796]: ERROR nova.compute.manager [instance: 
c8c7e784-431f-4a77-aac5-f2c506b18f66] During handling of the above exception, another exception occurred: [ 1299.422506] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] [ 1299.422506] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Traceback (most recent call last): [ 1299.422506] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 1299.422506] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] with self.rt.instance_claim(context, instance, node, allocs, [ 1299.422506] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1299.422506] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] return f(*args, **kwargs) [ 1299.422934] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 1299.422934] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] self._update(elevated, cn) [ 1299.422934] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1299.422934] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] self._update_to_placement(context, compute_node, startup) [ 1299.422934] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1299.422934] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1299.422934] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1299.422934] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] return attempt.get(self._wrap_exception) [ 1299.422934] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1299.422934] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] six.reraise(self.value[0], self.value[1], self.value[2]) [ 1299.422934] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1299.422934] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] raise value [ 1299.422934] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1299.423395] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1299.423395] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/nova/nova/compute/resource_tracker.py", 
line 1390, in _update_to_placement [ 1299.423395] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] self.reportclient.update_from_provider_tree( [ 1299.423395] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1299.423395] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] with catch_all(pd.uuid): [ 1299.423395] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1299.423395] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] self.gen.throw(typ, value, traceback) [ 1299.423395] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1299.423395] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] raise exception.ResourceProviderSyncFailed() [ 1299.423395] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1299.423395] env[69796]: ERROR nova.compute.manager [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] [ 1299.423681] env[69796]: DEBUG nova.compute.utils [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1299.424765] env[69796]: DEBUG nova.compute.manager [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Build of instance c8c7e784-431f-4a77-aac5-f2c506b18f66 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1299.425187] env[69796]: DEBUG nova.compute.manager [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1299.425417] env[69796]: DEBUG oslo_concurrency.lockutils [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "refresh_cache-c8c7e784-431f-4a77-aac5-f2c506b18f66" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.425565] env[69796]: DEBUG oslo_concurrency.lockutils [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquired lock "refresh_cache-c8c7e784-431f-4a77-aac5-f2c506b18f66" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1299.425724] env[69796]: DEBUG nova.network.neutron [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1299.946278] env[69796]: DEBUG nova.network.neutron [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1300.025356] env[69796]: DEBUG nova.network.neutron [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.528410] env[69796]: DEBUG oslo_concurrency.lockutils [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Releasing lock "refresh_cache-c8c7e784-431f-4a77-aac5-f2c506b18f66" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1300.528638] env[69796]: DEBUG nova.compute.manager [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1300.528828] env[69796]: DEBUG nova.compute.manager [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1300.528997] env[69796]: DEBUG nova.network.neutron [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1300.544820] env[69796]: DEBUG nova.network.neutron [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1301.047565] env[69796]: DEBUG nova.network.neutron [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.550871] env[69796]: INFO nova.compute.manager [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: c8c7e784-431f-4a77-aac5-f2c506b18f66] Took 1.02 seconds to deallocate network for instance. 
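The 400 body quoted above contains the schema fragment placement enforces on inventory updates: max_unit must be an integer between 1 and 2147483647, while this host keeps reporting a DISK_GB max_unit of 0. The snippet below reproduces the exact validation message with the jsonschema package (assumed to be available); the schema is cut down to just the fragment cited in the error response:

import jsonschema

# Reduced to the fragment quoted in placement's 400 response; the real
# placement schema also covers the other inventory fields.
INVENTORY_SCHEMA = {
    "type": "object",
    "properties": {
        "inventories": {
            "type": "object",
            "patternProperties": {
                "^[A-Z0-9_]+$": {
                    "type": "object",
                    "properties": {
                        "max_unit": {
                            "type": "integer",
                            "maximum": 2147483647,
                            "minimum": 1,
                        },
                    },
                },
            },
        },
    },
}

# The DISK_GB entry this compute host keeps sending, per the log above.
payload = {"inventories": {"DISK_GB": {"total": 400, "reserved": 0,
                                       "min_unit": 1, "max_unit": 0,
                                       "step_size": 1,
                                       "allocation_ratio": 1.0}}}

try:
    jsonschema.validate(payload, INVENTORY_SCHEMA)
except jsonschema.ValidationError as exc:
    print(exc.message)  # -> 0 is less than the minimum of 1

However the driver arrived at it, a max_unit of 0 can never pass this schema, so every instance_claim on this node hits the same 400 and the build is re-scheduled.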
[ 1302.582487] env[69796]: INFO nova.scheduler.client.report [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Deleted allocations for instance c8c7e784-431f-4a77-aac5-f2c506b18f66 [ 1303.090607] env[69796]: DEBUG oslo_concurrency.lockutils [None req-98ddcf5d-acf0-4cef-bbb9-11afb9859c48 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "c8c7e784-431f-4a77-aac5-f2c506b18f66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 6.485s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.572505] env[69796]: DEBUG oslo_concurrency.lockutils [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "7da2682e-dc19-4d1f-9e81-3806cfd15bab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.572814] env[69796]: DEBUG oslo_concurrency.lockutils [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "7da2682e-dc19-4d1f-9e81-3806cfd15bab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1305.075231] env[69796]: DEBUG nova.compute.manager [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1305.593803] env[69796]: DEBUG oslo_concurrency.lockutils [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1305.594103] env[69796]: DEBUG oslo_concurrency.lockutils [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1305.595546] env[69796]: INFO nova.compute.claims [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1306.619931] env[69796]: DEBUG nova.scheduler.client.report [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1306.632455] env[69796]: DEBUG nova.scheduler.client.report [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1306.632672] env[69796]: DEBUG nova.compute.provider_tree [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1306.642978] env[69796]: DEBUG nova.scheduler.client.report [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1306.660475] env[69796]: DEBUG nova.scheduler.client.report [None 
req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1306.813560] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ee93a3-d7ea-4fe2-8722-bd8bf386aee4 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.821266] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1146dc76-3e26-4c46-82e1-e1f6d1d82dce {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.851852] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17c5093-e466-4118-96f9-d9f58b827131 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.859072] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95fb5e8-797f-4b94-90c1-10f3e734d629 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.871929] env[69796]: DEBUG nova.compute.provider_tree [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1307.393358] env[69796]: ERROR nova.scheduler.client.report [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [req-a36ab1c5-f6d0-41b5-8c61-2655c51a1bdf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a36ab1c5-f6d0-41b5-8c61-2655c51a1bdf"}]} [ 1307.393717] env[69796]: DEBUG oslo_concurrency.lockutils [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.800s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1307.394316] env[69796]: ERROR nova.compute.manager [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1307.394316] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Traceback (most recent call last): [ 1307.394316] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1307.394316] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] yield [ 1307.394316] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1307.394316] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] self.set_inventory_for_provider( [ 1307.394316] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1307.394316] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1307.394584] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-a36ab1c5-f6d0-41b5-8c61-2655c51a1bdf"}]} [ 1307.394584] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] [ 1307.394584] env[69796]: ERROR nova.compute.manager [instance: 
7da2682e-dc19-4d1f-9e81-3806cfd15bab] During handling of the above exception, another exception occurred: [ 1307.394584] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] [ 1307.394584] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Traceback (most recent call last): [ 1307.394584] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 1307.394584] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] with self.rt.instance_claim(context, instance, node, allocs, [ 1307.394584] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1307.394584] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] return f(*args, **kwargs) [ 1307.394879] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 1307.394879] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] self._update(elevated, cn) [ 1307.394879] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1307.394879] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] self._update_to_placement(context, compute_node, startup) [ 1307.394879] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1307.394879] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1307.394879] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1307.394879] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] return attempt.get(self._wrap_exception) [ 1307.394879] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1307.394879] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] six.reraise(self.value[0], self.value[1], self.value[2]) [ 1307.394879] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1307.394879] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] raise value [ 1307.394879] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1307.395330] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1307.395330] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/nova/nova/compute/resource_tracker.py", 
line 1390, in _update_to_placement [ 1307.395330] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] self.reportclient.update_from_provider_tree( [ 1307.395330] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1307.395330] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] with catch_all(pd.uuid): [ 1307.395330] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1307.395330] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] self.gen.throw(typ, value, traceback) [ 1307.395330] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1307.395330] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] raise exception.ResourceProviderSyncFailed() [ 1307.395330] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1307.395330] env[69796]: ERROR nova.compute.manager [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] [ 1307.395661] env[69796]: DEBUG nova.compute.utils [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1307.396609] env[69796]: DEBUG nova.compute.manager [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Build of instance 7da2682e-dc19-4d1f-9e81-3806cfd15bab was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1307.396995] env[69796]: DEBUG nova.compute.manager [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1307.397238] env[69796]: DEBUG oslo_concurrency.lockutils [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "refresh_cache-7da2682e-dc19-4d1f-9e81-3806cfd15bab" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.397388] env[69796]: DEBUG oslo_concurrency.lockutils [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquired lock "refresh_cache-7da2682e-dc19-4d1f-9e81-3806cfd15bab" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1307.397549] env[69796]: DEBUG nova.network.neutron [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1307.916020] env[69796]: DEBUG nova.network.neutron [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1307.994273] env[69796]: DEBUG nova.network.neutron [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.497547] env[69796]: DEBUG oslo_concurrency.lockutils [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Releasing lock "refresh_cache-7da2682e-dc19-4d1f-9e81-3806cfd15bab" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.497759] env[69796]: DEBUG nova.compute.manager [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1308.497949] env[69796]: DEBUG nova.compute.manager [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1308.498156] env[69796]: DEBUG nova.network.neutron [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1308.514098] env[69796]: DEBUG nova.network.neutron [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1309.017239] env[69796]: DEBUG nova.network.neutron [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.520139] env[69796]: INFO nova.compute.manager [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 7da2682e-dc19-4d1f-9e81-3806cfd15bab] Took 1.02 seconds to deallocate network for instance. 
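Each traceback also runs through retrying.py before the exception reaches the compute manager: _update_to_placement is wrapped by the retrying decorator, and when the raised exception is not one the wrapper is configured to retry on, Attempt.get() re-raises it via six.reraise, which is why those frames appear in every stack above. A small illustrative sketch, assuming the retrying package from PyPI (the same module visible at site-packages/retrying.py in the traceback) and using stand-in exception classes and parameter values rather than Nova's actual configuration:

from retrying import retry


class ResourceProviderUpdateConflict(Exception):
    """Stand-in for a retriable conflict-style error (illustrative)."""


class ResourceProviderSyncFailed(Exception):
    """Stand-in for nova.exception.ResourceProviderSyncFailed."""


def _retry_on_conflict(exc):
    # Retry only conflict-style errors; anything else falls through.
    return isinstance(exc, ResourceProviderUpdateConflict)


@retry(retry_on_exception=_retry_on_conflict,
       stop_max_attempt_number=4,   # illustrative value
       wait_fixed=500)              # milliseconds between attempts
def update_to_placement():
    # Stands in for ResourceTracker._update_to_placement(); raising a
    # non-retriable error makes the pass-through path visible.
    raise ResourceProviderSyncFailed("placement sync failed")


try:
    update_to_placement()
except ResourceProviderSyncFailed as exc:
    # Re-raised by retrying's Attempt.get()/six.reraise on the first
    # attempt, matching the frames in the tracebacks above.
    print("re-raised without retrying:", exc)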
[ 1310.548958] env[69796]: INFO nova.scheduler.client.report [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Deleted allocations for instance 7da2682e-dc19-4d1f-9e81-3806cfd15bab [ 1311.057058] env[69796]: DEBUG oslo_concurrency.lockutils [None req-41a19fa7-8b9b-40b7-911a-40845c2059e9 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "7da2682e-dc19-4d1f-9e81-3806cfd15bab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 6.484s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.527088] env[69796]: DEBUG oslo_concurrency.lockutils [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "20412397-5d4d-4774-8834-49ff987a8186" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1312.527379] env[69796]: DEBUG oslo_concurrency.lockutils [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "20412397-5d4d-4774-8834-49ff987a8186" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.029989] env[69796]: DEBUG nova.compute.manager [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1313.550774] env[69796]: DEBUG oslo_concurrency.lockutils [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1313.551096] env[69796]: DEBUG oslo_concurrency.lockutils [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1313.552603] env[69796]: INFO nova.compute.claims [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1314.577947] env[69796]: DEBUG nova.scheduler.client.report [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1314.591612] env[69796]: DEBUG nova.scheduler.client.report [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1314.591839] env[69796]: DEBUG nova.compute.provider_tree [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1314.602906] env[69796]: DEBUG nova.scheduler.client.report [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1314.621075] env[69796]: DEBUG nova.scheduler.client.report [None 
req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1314.772898] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a79fdd-bb34-452e-94fd-261d22e0ab0a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.780366] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba80c044-968d-4dc5-8e97-c795e9d9bfc5 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.810244] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a6453e-9d22-4503-aee9-4bae5fcd1354 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.817761] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5810df3-250d-40d3-9a0d-2759d798eb82 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.832396] env[69796]: DEBUG nova.compute.provider_tree [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1315.352193] env[69796]: ERROR nova.scheduler.client.report [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [req-2e8b62f9-619a-4fe6-b243-8ae78292f3a8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2e8b62f9-619a-4fe6-b243-8ae78292f3a8"}]} [ 1315.352556] env[69796]: DEBUG oslo_concurrency.lockutils [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.801s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1315.353163] env[69796]: ERROR nova.compute.manager [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1315.353163] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] Traceback (most recent call last): [ 1315.353163] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1315.353163] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] yield [ 1315.353163] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1315.353163] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] self.set_inventory_for_provider( [ 1315.353163] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1315.353163] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1315.353409] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-2e8b62f9-619a-4fe6-b243-8ae78292f3a8"}]} [ 1315.353409] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] [ 1315.353409] env[69796]: ERROR nova.compute.manager [instance: 
20412397-5d4d-4774-8834-49ff987a8186] During handling of the above exception, another exception occurred: [ 1315.353409] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] [ 1315.353409] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] Traceback (most recent call last): [ 1315.353409] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 1315.353409] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] with self.rt.instance_claim(context, instance, node, allocs, [ 1315.353409] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1315.353409] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] return f(*args, **kwargs) [ 1315.353679] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 1315.353679] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] self._update(elevated, cn) [ 1315.353679] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1315.353679] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] self._update_to_placement(context, compute_node, startup) [ 1315.353679] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1315.353679] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1315.353679] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1315.353679] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] return attempt.get(self._wrap_exception) [ 1315.353679] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1315.353679] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] six.reraise(self.value[0], self.value[1], self.value[2]) [ 1315.353679] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1315.353679] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] raise value [ 1315.353679] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1315.354024] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1315.354024] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/nova/nova/compute/resource_tracker.py", 
line 1390, in _update_to_placement [ 1315.354024] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] self.reportclient.update_from_provider_tree( [ 1315.354024] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1315.354024] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] with catch_all(pd.uuid): [ 1315.354024] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1315.354024] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] self.gen.throw(typ, value, traceback) [ 1315.354024] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1315.354024] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] raise exception.ResourceProviderSyncFailed() [ 1315.354024] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1315.354024] env[69796]: ERROR nova.compute.manager [instance: 20412397-5d4d-4774-8834-49ff987a8186] [ 1315.354382] env[69796]: DEBUG nova.compute.utils [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1315.356040] env[69796]: DEBUG nova.compute.manager [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] Build of instance 20412397-5d4d-4774-8834-49ff987a8186 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1315.356437] env[69796]: DEBUG nova.compute.manager [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1315.356720] env[69796]: DEBUG oslo_concurrency.lockutils [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "refresh_cache-20412397-5d4d-4774-8834-49ff987a8186" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1315.356887] env[69796]: DEBUG oslo_concurrency.lockutils [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquired lock "refresh_cache-20412397-5d4d-4774-8834-49ff987a8186" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1315.357140] env[69796]: DEBUG nova.network.neutron [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1315.876639] env[69796]: DEBUG nova.network.neutron [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1315.953931] env[69796]: DEBUG nova.network.neutron [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.456489] env[69796]: DEBUG oslo_concurrency.lockutils [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Releasing lock "refresh_cache-20412397-5d4d-4774-8834-49ff987a8186" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1316.456670] env[69796]: DEBUG nova.compute.manager [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1316.456845] env[69796]: DEBUG nova.compute.manager [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1316.457027] env[69796]: DEBUG nova.network.neutron [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1316.473923] env[69796]: DEBUG nova.network.neutron [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1316.976638] env[69796]: DEBUG nova.network.neutron [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1317.479951] env[69796]: INFO nova.compute.manager [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 20412397-5d4d-4774-8834-49ff987a8186] Took 1.02 seconds to deallocate network for instance. 
[ 1318.509470] env[69796]: INFO nova.scheduler.client.report [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Deleted allocations for instance 20412397-5d4d-4774-8834-49ff987a8186 [ 1319.017428] env[69796]: DEBUG oslo_concurrency.lockutils [None req-228367b9-9a9c-48f3-9b13-10dd0c1f62d7 tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "20412397-5d4d-4774-8834-49ff987a8186" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 6.490s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.513870] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "84e6622c-4950-4f59-b998-96f53fd2bbd5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.514201] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "84e6622c-4950-4f59-b998-96f53fd2bbd5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1321.016423] env[69796]: DEBUG nova.compute.manager [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Starting instance... 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1321.536435] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1321.536746] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1321.538200] env[69796]: INFO nova.compute.claims [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1322.563595] env[69796]: DEBUG nova.scheduler.client.report [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1322.578043] env[69796]: DEBUG nova.scheduler.client.report [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1322.578302] env[69796]: DEBUG nova.compute.provider_tree [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1322.589778] env[69796]: DEBUG nova.scheduler.client.report [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1322.608155] env[69796]: DEBUG nova.scheduler.client.report [None 
req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1322.763607] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a742ee7b-93fe-461b-85e7-97765240ca1a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.771270] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27b93a4-f632-4aa7-9c8f-f1ac80bd0831 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.801071] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de13dcc-f730-4a9b-965c-2011397593ac {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.808605] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c97e832-a562-4921-b447-a25271cb3218 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.821929] env[69796]: DEBUG nova.compute.provider_tree [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1323.343047] env[69796]: ERROR nova.scheduler.client.report [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [req-192f0404-527c-42d5-9362-2258f71c0f10] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. 
Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-192f0404-527c-42d5-9362-2258f71c0f10"}]} [ 1323.343047] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.806s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1323.343428] env[69796]: ERROR nova.compute.manager [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Failed to build and run instance: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1323.343428] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Traceback (most recent call last): [ 1323.343428] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1323.343428] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] yield [ 1323.343428] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1323.343428] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] self.set_inventory_for_provider( [ 1323.343428] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1323.343428] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1323.343663] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-192f0404-527c-42d5-9362-2258f71c0f10"}]} [ 1323.343663] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] [ 1323.343663] env[69796]: ERROR nova.compute.manager [instance: 
84e6622c-4950-4f59-b998-96f53fd2bbd5] During handling of the above exception, another exception occurred: [ 1323.343663] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] [ 1323.343663] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Traceback (most recent call last): [ 1323.343663] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 1323.343663] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] with self.rt.instance_claim(context, instance, node, allocs, [ 1323.343663] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1323.343663] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] return f(*args, **kwargs) [ 1323.343931] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 240, in instance_claim [ 1323.343931] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] self._update(elevated, cn) [ 1323.343931] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1323.343931] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] self._update_to_placement(context, compute_node, startup) [ 1323.343931] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1323.343931] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1323.343931] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 1323.343931] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] return attempt.get(self._wrap_exception) [ 1323.343931] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1323.343931] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] six.reraise(self.value[0], self.value[1], self.value[2]) [ 1323.343931] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1323.343931] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] raise value [ 1323.343931] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1323.344279] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1323.344279] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/nova/nova/compute/resource_tracker.py", 
line 1390, in _update_to_placement [ 1323.344279] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] self.reportclient.update_from_provider_tree( [ 1323.344279] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1323.344279] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] with catch_all(pd.uuid): [ 1323.344279] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1323.344279] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] self.gen.throw(typ, value, traceback) [ 1323.344279] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1323.344279] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] raise exception.ResourceProviderSyncFailed() [ 1323.344279] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1323.344279] env[69796]: ERROR nova.compute.manager [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] [ 1323.344590] env[69796]: DEBUG nova.compute.utils [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Failed to synchronize the placement service with resource provider information supplied by the compute host. {{(pid=69796) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1323.346309] env[69796]: DEBUG nova.compute.manager [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Build of instance 84e6622c-4950-4f59-b998-96f53fd2bbd5 was re-scheduled: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
{{(pid=69796) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1323.346710] env[69796]: DEBUG nova.compute.manager [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Unplugging VIFs for instance {{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1323.346940] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquiring lock "refresh_cache-84e6622c-4950-4f59-b998-96f53fd2bbd5" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.347099] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Acquired lock "refresh_cache-84e6622c-4950-4f59-b998-96f53fd2bbd5" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1323.347261] env[69796]: DEBUG nova.network.neutron [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Building network info cache for instance {{(pid=69796) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1323.868174] env[69796]: DEBUG nova.network.neutron [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1323.951711] env[69796]: DEBUG nova.network.neutron [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.455165] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Releasing lock "refresh_cache-84e6622c-4950-4f59-b998-96f53fd2bbd5" {{(pid=69796) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1324.455378] env[69796]: DEBUG nova.compute.manager [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69796) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1324.455569] env[69796]: DEBUG nova.compute.manager [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Deallocating network for instance {{(pid=69796) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1324.455839] env[69796]: DEBUG nova.network.neutron [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] deallocate_for_instance() {{(pid=69796) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1324.472574] env[69796]: DEBUG nova.network.neutron [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Instance cache missing network info. {{(pid=69796) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1324.975800] env[69796]: DEBUG nova.network.neutron [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Updating instance_info_cache with network_info: [] {{(pid=69796) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.479111] env[69796]: INFO nova.compute.manager [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] [instance: 84e6622c-4950-4f59-b998-96f53fd2bbd5] Took 1.02 seconds to deallocate network for instance. [ 1326.510876] env[69796]: INFO nova.scheduler.client.report [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Deleted allocations for instance 84e6622c-4950-4f59-b998-96f53fd2bbd5 [ 1327.019204] env[69796]: DEBUG oslo_concurrency.lockutils [None req-8921a620-9555-43c9-bf4c-4e7d0b90d0cb tempest-ServersTestJSON-2125645972 tempest-ServersTestJSON-2125645972-project-member] Lock "84e6622c-4950-4f59-b998-96f53fd2bbd5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 6.505s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1333.354507] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.354507] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69796) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11184}} [ 1335.245724] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._sync_power_states {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.751847] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Getting list of instances from cluster (obj){ [ 1335.751847] env[69796]: value = "domain-c8" [ 1335.751847] env[69796]: _type = "ClusterComputeResource" [ 1335.751847] env[69796]: } {{(pid=69796) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1335.752953] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a927a390-f214-4f41-a0f6-5bebbbeeac04 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.762083] env[69796]: DEBUG nova.virt.vmwareapi.vmops [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Got total of 0 instances {{(pid=69796) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1335.762273] env[69796]: WARNING nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] While synchronizing instance power states, found 13 instances in the database and 0 instances on the hypervisor. [ 1335.762383] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid 47f223c0-12b0-4eda-ab42-81fe8b95afac {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1335.762580] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid 38792225-b054-4c08-b3ec-51d46287b0f9 {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1335.762745] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid 47005af8-11fe-498f-9b67-e0316faeeb8f {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1335.762914] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid 836605ee-50cb-48b0-ba2e-33db3832f8ba {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1335.763079] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid a4a16667-cd00-4850-9389-0bd57c7efd74 {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1335.763242] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid 7f37f6c9-adba-4292-9d47-c455f77e539f {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1335.763401] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid 8b103adc-9903-406f-8fd1-e193e00cde11 {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1335.763554] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1335.763705] env[69796]: DEBUG 
nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid 3020e505-513b-4b29-996a-6e70a212f508 {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1335.763856] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid f0d4f167-344a-4828-9f6e-8a62ed8e064d {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1335.764019] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1335.764177] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7 {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1335.764327] env[69796]: DEBUG nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Triggering sync for uuid 983f57b3-3bfb-41ce-a924-d48c72d25c9f {{(pid=69796) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10975}} [ 1335.764635] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "47f223c0-12b0-4eda-ab42-81fe8b95afac" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.764833] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "47f223c0-12b0-4eda-ab42-81fe8b95afac" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.765119] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "38792225-b054-4c08-b3ec-51d46287b0f9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.765280] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "38792225-b054-4c08-b3ec-51d46287b0f9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.765510] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "47005af8-11fe-498f-9b67-e0316faeeb8f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.765670] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "47005af8-11fe-498f-9b67-e0316faeeb8f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69796) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.765890] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "836605ee-50cb-48b0-ba2e-33db3832f8ba" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.766062] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "836605ee-50cb-48b0-ba2e-33db3832f8ba" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.766287] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "a4a16667-cd00-4850-9389-0bd57c7efd74" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.766442] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "a4a16667-cd00-4850-9389-0bd57c7efd74" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.766785] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "7f37f6c9-adba-4292-9d47-c455f77e539f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.766953] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "7f37f6c9-adba-4292-9d47-c455f77e539f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.767206] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "8b103adc-9903-406f-8fd1-e193e00cde11" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.767365] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "8b103adc-9903-406f-8fd1-e193e00cde11" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.767593] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.767752] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.767982] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "3020e505-513b-4b29-996a-6e70a212f508" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.768150] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "3020e505-513b-4b29-996a-6e70a212f508" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.768383] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.768553] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.768807] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.768965] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.769551] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.769551] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69796) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.769699] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.769844] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.770182] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b83c6375-e668-4407-a484-24daa542e4a1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.772087] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b89ace1-90dc-4677-bdfe-082a7e53a993 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.773782] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-186abcf2-af37-49bf-b1ac-7d785ecf63e2 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.775242] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2dfa7950-3520-4f02-9c7d-e3d0e0c96d7f {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.778254] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-baeb2948-8ac9-4323-b849-319914091ebb {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.780091] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e5cb358-8a8d-46a9-97cc-f68638290e65 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.782067] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3116c421-3047-4b9e-90ac-2df362217e08 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.783837] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-72e1dcaf-e9ab-45fb-89f2-920e6f358639 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.785712] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a0eda357-1182-40f8-99ee-c90b5e700b63 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.787507] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6be2dfc8-813f-4520-abdb-be74c67325ce {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.789394] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid 
with opID=oslo.vmware-e7a19e8e-0b76-48b7-a8aa-0aba6fb766c7 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.791154] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6badd1d-c6e5-48ac-8585-f90204ef45af {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.793026] env[69796]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8292373c-0820-48d1-92c3-e742bd6791cd {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.827038] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4933254-013a-4397-8ace-a7682f5bf3d1 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.840487] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53a2f65-5881-44ce-a42f-9fd1e3b14335 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.853298] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c25bb2b-79a6-4898-a574-95b2e8bb4d8b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.870918] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de60f8d-c1a0-4e39-a354-e8508ea1bf9a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.883628] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-981466f7-8749-45e1-aad2-ba2570ea1a9a {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.897076] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dd9cf57-263c-49ee-b4d5-b64692bb291b {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.909324] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.912239] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc61911-997e-4c47-ba06-aba959f2aa4e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.925164] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f478f660-c406-4e80-a435-0903da40276d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.937957] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29344ed1-a538-4b03-82a0-841bc69a906e {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.950538] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0bad2691-27fe-4e92-b094-c8620261e330 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.963353] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0da28a1-1042-4edb-967a-09342fc69630 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.975676] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3073865e-a4ee-421d-a96e-92e88eeefe17 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.003058] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0c9de8-12f3-4f38-89a3-7fd4555e634d {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.062756] env[69796]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1336.067631] env[69796]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1336.072244] env[69796]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1336.081491] env[69796]: WARNING oslo_messaging._drivers.amqpdriver [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1336.086159] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "8b103adc-9903-406f-8fd1-e193e00cde11" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.319s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.086806] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.318s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.443272] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1336.518210] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "38792225-b054-4c08-b3ec-51d46287b0f9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.753s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.536997] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "47005af8-11fe-498f-9b67-e0316faeeb8f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.771s {{(pid=69796) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.537402] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "47f223c0-12b0-4eda-ab42-81fe8b95afac" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.772s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.537784] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "836605ee-50cb-48b0-ba2e-33db3832f8ba" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.772s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.563531] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.794s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.587275] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "983f57b3-3bfb-41ce-a924-d48c72d25c9f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.817s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.591898] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "f0d4f167-344a-4828-9f6e-8a62ed8e064d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.823s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.596411] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "9a0e9a08-1176-4f88-bbcd-f0f52d3d7714" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.829s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.596730] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "a4a16667-cd00-4850-9389-0bd57c7efd74" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.830s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.597031] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "7f37f6c9-adba-4292-9d47-c455f77e539f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.830s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1336.598438] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "3020e505-513b-4b29-996a-6e70a212f508" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.830s
{{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.353969] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.354262] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager.update_available_resource {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.857774] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.858235] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.858235] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.858383] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69796) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1337.859312] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f919a563-3a00-4c79-8feb-04bf161368b0 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.868139] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1425c707-2ca9-41e2-86f6-55336c053f61 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.885048] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99a7adb-0727-4e61-9a8f-44c01c145170 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.892719] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe23c5e-27e1-4dbb-9398-3fe43c34cd04 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.922667] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180695MB free_disk=0GB free_vcpus=48 pci_devices=None {{(pid=69796) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1337.922865] 
env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.923544] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.462829] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47f223c0-12b0-4eda-ab42-81fe8b95afac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.463242] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 38792225-b054-4c08-b3ec-51d46287b0f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.463242] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 47005af8-11fe-498f-9b67-e0316faeeb8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.463361] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 836605ee-50cb-48b0-ba2e-33db3832f8ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.463404] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance a4a16667-cd00-4850-9389-0bd57c7efd74 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.463508] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 7f37f6c9-adba-4292-9d47-c455f77e539f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.463625] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 8b103adc-9903-406f-8fd1-e193e00cde11 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.463741] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 9a0e9a08-1176-4f88-bbcd-f0f52d3d7714 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.463856] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 3020e505-513b-4b29-996a-6e70a212f508 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.463980] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance f0d4f167-344a-4828-9f6e-8a62ed8e064d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.464131] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance d1c6fd2b-462e-4136-b3ba-a4d4d113e4d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.464257] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance e4cfebda-58b2-4ee4-a670-e08ba2d2e1f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.464375] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Instance 983f57b3-3bfb-41ce-a924-d48c72d25c9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69796) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.464596] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1339.464739] env[69796]: DEBUG nova.compute.resource_tracker [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=69796) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1339.481810] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Refreshing inventories for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1339.496524] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating ProviderTree inventory for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1339.496726] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 1, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1339.510095] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Refreshing aggregate associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, aggregates: None {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1339.529625] env[69796]: DEBUG nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Refreshing trait associations for resource provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE {{(pid=69796) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1339.701830] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be31222-0620-4730-943d-da586bbe4a61 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.710973] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3bc44639-3b3b-4632-bc5d-fd72df109d63 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.745632] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e599379-ad14-4b8f-afc2-5186078b8193 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.754647] env[69796]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaae487b-cc45-404d-afa9-a12533e36516 {{(pid=69796) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.769518] env[69796]: DEBUG nova.compute.provider_tree [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Updating inventory in ProviderTree for provider dc1d576d-f9a3-4db7-b636-fdf2129d2ab3 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69796) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1340.291125] env[69796]: ERROR nova.scheduler.client.report [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] [req-aed71764-0e3a-4286-a3d8-faafe637067b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 0, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dc1d576d-f9a3-4db7-b636-fdf2129d2ab3. Got 400: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-aed71764-0e3a-4286-a3d8-faafe637067b"}]} [ 1340.291448] env[69796]: DEBUG oslo_concurrency.lockutils [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.368s {{(pid=69796) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1340.292050] env[69796]: ERROR nova.compute.manager [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Error updating resources for node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28.: nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. 
[ 1340.292050] env[69796]: ERROR nova.compute.manager Traceback (most recent call last): [ 1340.292050] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1406, in catch_all [ 1340.292050] env[69796]: ERROR nova.compute.manager yield [ 1340.292050] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 1340.292050] env[69796]: ERROR nova.compute.manager self.set_inventory_for_provider( [ 1340.292050] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1011, in set_inventory_for_provider [ 1340.292050] env[69796]: ERROR nova.compute.manager raise exception.ResourceProviderUpdateFailed(url=url, error=resp.text) [ 1340.292050] env[69796]: ERROR nova.compute.manager nova.exception.ResourceProviderUpdateFailed: Failed to update resource provider via URL /resource_providers/dc1d576d-f9a3-4db7-b636-fdf2129d2ab3/inventories: {"errors": [{"status": 400, "title": "Bad Request", "detail": "The server could not comply with the request since it is either malformed or otherwise incorrect.\n\n JSON does not validate: 0 is less than the minimum of 1 Failed validating 'minimum' in schema['properties']['inventories']['patternProperties']['^[A-Z0-9_]+$']['properties']['max_unit']: {'type': 'integer', 'maximum': 2147483647, 'minimum': 1} On instance['inventories']['DISK_GB']['max_unit']: 0 ", "code": "placement.undefined_code", "request_id": "req-aed71764-0e3a-4286-a3d8-faafe637067b"}]} [ 1340.292050] env[69796]: ERROR nova.compute.manager [ 1340.292050] env[69796]: ERROR nova.compute.manager During handling of the above exception, another exception occurred: [ 1340.292050] env[69796]: ERROR nova.compute.manager [ 1340.292457] env[69796]: ERROR nova.compute.manager Traceback (most recent call last): [ 1340.292457] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 11220, in _update_available_resource_for_node [ 1340.292457] env[69796]: ERROR nova.compute.manager self.rt.update_available_resource(context, nodename, [ 1340.292457] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 965, in update_available_resource [ 1340.292457] env[69796]: ERROR nova.compute.manager self._update_available_resource(context, resources, startup=startup) [ 1340.292457] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1340.292457] env[69796]: ERROR nova.compute.manager return f(*args, **kwargs) [ 1340.292457] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1096, in _update_available_resource [ 1340.292457] env[69796]: ERROR nova.compute.manager self._update(context, cn, startup=startup) [ 1340.292457] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1405, in _update [ 1340.292457] env[69796]: ERROR nova.compute.manager self._update_to_placement(context, compute_node, startup) [ 1340.292457] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 1340.292457] env[69796]: ERROR nova.compute.manager return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 1340.292457] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 257, in call [ 
1340.292457] env[69796]: ERROR nova.compute.manager return attempt.get(self._wrap_exception) [ 1340.292457] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 1340.292457] env[69796]: ERROR nova.compute.manager six.reraise(self.value[0], self.value[1], self.value[2]) [ 1340.292457] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 1340.292976] env[69796]: ERROR nova.compute.manager raise value [ 1340.292976] env[69796]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 1340.292976] env[69796]: ERROR nova.compute.manager attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 1340.292976] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1390, in _update_to_placement [ 1340.292976] env[69796]: ERROR nova.compute.manager self.reportclient.update_from_provider_tree( [ 1340.292976] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1501, in update_from_provider_tree [ 1340.292976] env[69796]: ERROR nova.compute.manager with catch_all(pd.uuid): [ 1340.292976] env[69796]: ERROR nova.compute.manager File "/usr/lib/python3.10/contextlib.py", line 153, in __exit__ [ 1340.292976] env[69796]: ERROR nova.compute.manager self.gen.throw(typ, value, traceback) [ 1340.292976] env[69796]: ERROR nova.compute.manager File "/opt/stack/nova/nova/scheduler/client/report.py", line 1418, in catch_all [ 1340.292976] env[69796]: ERROR nova.compute.manager raise exception.ResourceProviderSyncFailed() [ 1340.292976] env[69796]: ERROR nova.compute.manager nova.exception.ResourceProviderSyncFailed: Failed to synchronize the placement service with resource provider information supplied by the compute host. [ 1340.292976] env[69796]: ERROR nova.compute.manager [ 1341.287326] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1341.287746] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1341.287746] env[69796]: DEBUG oslo_service.periodic_task [None req-b293c5af-74de-4a8a-9e41-72034570fcce None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69796) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
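The 400 from placement at 1340.291125 is a schema rejection rather than a transient API failure: the resource tracker tried to PUT a DISK_GB inventory whose max_unit was 0 (the hypervisor resource view at 1337.922667 reports free_disk=0GB, which is presumably where the zero comes from), while placement's inventory schema requires max_unit >= 1. Every subsequent update_available_resource pass will hit the same ResourceProviderSyncFailed until the driver reports a non-zero DISK_GB max_unit again. A minimal sketch of that validation step, using the jsonschema library against a schema reduced from the error detail (only max_unit's bounds are copied from the response; the other property constraints are illustrative assumptions):

import jsonschema

# Reduced stand-in for placement's inventory item schema; max_unit's bounds
# are taken from the 400 detail above, the rest is assumed for illustration.
INVENTORY_ITEM = {
    "type": "object",
    "properties": {
        "total": {"type": "integer"},
        "reserved": {"type": "integer"},
        "min_unit": {"type": "integer"},
        "max_unit": {"type": "integer", "minimum": 1, "maximum": 2147483647},
        "step_size": {"type": "integer"},
        "allocation_ratio": {"type": "number"},
    },
}

# The DISK_GB payload nova attempted to sync, copied from the log above.
disk_gb = {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 0,
           "step_size": 1, "allocation_ratio": 1.0}

try:
    jsonschema.validate(disk_gb, INVENTORY_ITEM)
except jsonschema.exceptions.ValidationError as exc:
    print(exc.message)  # -> "0 is less than the minimum of 1"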
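The urllib3 "Connection pool is full, discarding connection" warnings at 1336.062756 through 1336.072244 mean that more keep-alive connections to the vCenter host were returned to the pool at once than the pool can hold; the extras are closed (hence queue.Full) and re-opened later. The pool size of 10 presumably corresponds to the VMware session's connection pool setting (nova's [vmware]/connection_pool_size defaults to 10). The generic requests/urllib3 knob looks like the sketch below, which is illustrative only and not the code path nova itself uses:

import requests
from requests.adapters import HTTPAdapter

session = requests.Session()
# Keep up to 20 persistent connections per host instead of the default 10,
# so concurrent bursts are reused rather than discarded with queue.Full.
session.mount("https://", HTTPAdapter(pool_connections=10, pool_maxsize=20))

# Later calls such as session.get("https://vc.example.test/sdk") would draw
# from this larger per-host pool (the hostname here is a placeholder).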