[ 466.367858] env[62133]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62133) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 466.368303] env[62133]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62133) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 466.368303] env[62133]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62133) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 466.368614] env[62133]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 466.463210] env[62133]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62133) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 466.473301] env[62133]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62133) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 467.071922] env[62133]: INFO nova.virt.driver [None req-e9b52eac-f39d-4b98-9d96-085b1209b8ec None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 467.142343] env[62133]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 467.142514] env[62133]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 467.142600] env[62133]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62133) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 470.346225] env[62133]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-36c78950-2c54-4e31-ba7b-9633fc39ab85 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 470.361842] env[62133]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62133) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 470.361983] env[62133]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-27a841ba-a0a9-4a64-b2ea-c2ff7410502c {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 470.389116] env[62133]: INFO oslo_vmware.api [-] Successfully established new session; session ID is cb94a.
[ 470.389289] env[62133]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.247s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 470.389727] env[62133]: INFO nova.virt.vmwareapi.driver [None req-e9b52eac-f39d-4b98-9d96-085b1209b8ec None None] VMware vCenter version: 7.0.3
[ 470.393045] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52d0377-8107-424d-957e-880f4ecc83ee {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 470.413597] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304f4b3d-3918-4f85-8ba5-929aee534070 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 470.419396] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99746248-b219-4883-ace3-0c8f7937607d {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 470.425892] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af718ae2-dc19-4f8d-a8ef-2f2c926136b6 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 470.438652] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-badbf10d-2a90-4bff-af2f-8d7f25ca8e2a {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 470.444456] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f65c35-3f18-41ef-bf8b-90f0e2efec87 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 470.474101] env[62133]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-3bc8ad4d-2507-44aa-849c-51b7d5d7cedf {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 470.478908] env[62133]: DEBUG nova.virt.vmwareapi.driver [None req-e9b52eac-f39d-4b98-9d96-085b1209b8ec None None] Extension org.openstack.compute already exists. {{(pid=62133) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:225}}
[ 470.481597] env[62133]: INFO nova.compute.provider_config [None req-e9b52eac-f39d-4b98-9d96-085b1209b8ec None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 470.985055] env[62133]: DEBUG nova.context [None req-e9b52eac-f39d-4b98-9d96-085b1209b8ec None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),7964fe57-155b-4748-a4c8-e51068b35a83(cell1) {{(pid=62133) load_cells /opt/stack/nova/nova/context.py:464}}
[ 470.987182] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 470.987406] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 470.988193] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 470.988625] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Acquiring lock "7964fe57-155b-4748-a4c8-e51068b35a83" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 470.988847] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Lock "7964fe57-155b-4748-a4c8-e51068b35a83" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 470.989848] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Lock "7964fe57-155b-4748-a4c8-e51068b35a83" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 471.009768] env[62133]: INFO dbcounter [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Registered counter for database nova_cell0
[ 471.017822] env[62133]: INFO dbcounter [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Registered counter for database nova_cell1
[ 471.021507] env[62133]: DEBUG oslo_db.sqlalchemy.engines [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62133) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 471.021870] env[62133]: DEBUG oslo_db.sqlalchemy.engines [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62133) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 471.026618] env[62133]: ERROR nova.db.main.api [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 471.026618] env[62133]: result = function(*args, **kwargs)
[ 471.026618] env[62133]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 471.026618] env[62133]: return func(*args, **kwargs)
[ 471.026618] env[62133]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 471.026618] env[62133]: result = fn(*args, **kwargs)
[ 471.026618] env[62133]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 471.026618] env[62133]: return f(*args, **kwargs)
[ 471.026618] env[62133]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 471.026618] env[62133]: return db.service_get_minimum_version(context, binaries)
[ 471.026618] env[62133]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 471.026618] env[62133]: _check_db_access()
[ 471.026618] env[62133]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 471.026618] env[62133]: stacktrace = ''.join(traceback.format_stack())
[ 471.026618] env[62133]:
[ 471.027765] env[62133]: ERROR nova.db.main.api [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 471.027765] env[62133]: result = function(*args, **kwargs)
[ 471.027765] env[62133]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 471.027765] env[62133]: return func(*args, **kwargs)
[ 471.027765] env[62133]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 471.027765] env[62133]: result = fn(*args, **kwargs)
[ 471.027765] env[62133]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 471.027765] env[62133]: return f(*args, **kwargs)
[ 471.027765] env[62133]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 471.027765] env[62133]: return db.service_get_minimum_version(context, binaries)
[ 471.027765] env[62133]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 471.027765] env[62133]: _check_db_access()
[ 471.027765] env[62133]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 471.027765] env[62133]: stacktrace = ''.join(traceback.format_stack())
[ 471.027765] env[62133]:
[ 471.028216] env[62133]: WARNING nova.objects.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 471.028318] env[62133]: WARNING nova.objects.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Failed to get minimum service version for cell 7964fe57-155b-4748-a4c8-e51068b35a83
[ 471.028902] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Acquiring lock "singleton_lock" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 471.029087] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Acquired lock "singleton_lock" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 471.029341] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Releasing lock "singleton_lock" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 471.029666] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Full set of CONF: {{(pid=62133) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 471.029812] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ******************************************************************************** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 471.029940] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Configuration options gathered from: {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 471.030089] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}}
[ 471.030285] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 471.030413] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ================================================================================ {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}}
[ 471.030628] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] allow_resize_to_same_host = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 471.030801] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] arq_binding_timeout = 300 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 471.030931] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] backdoor_port = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 471.031068] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] backdoor_socket = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 471.031238] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] block_device_allocate_retries = 60 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 471.031398] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] block_device_allocate_retries_interval = 3 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 471.031595] env[62133]: DEBUG
oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cert = self.pem {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.031739] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.031908] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute_monitors = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.032125] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] config_dir = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.032309] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] config_drive_format = iso9660 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.032445] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.032614] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] config_source = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.032784] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] console_host = devstack {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.032952] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] control_exchange = nova {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.033126] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cpu_allocation_ratio = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.033289] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] daemon = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.033457] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] debug = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.033615] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] default_access_ip_network_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.033783] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] default_availability_zone = nova {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.033939] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] default_ephemeral_format = 
None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.034125] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] default_green_pool_size = 1000 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.034365] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.034530] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] default_schedule_zone = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.034692] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] disk_allocation_ratio = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.034854] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] enable_new_services = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.035044] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] enabled_apis = ['osapi_compute'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.035215] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] enabled_ssl_apis = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.035377] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] flat_injected = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.035536] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] force_config_drive = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.035696] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] force_raw_images = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.035866] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] graceful_shutdown_timeout = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.036033] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] heal_instance_info_cache_interval = 60 {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.036256] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] host = cpu-1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.036454] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.036641] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] initial_disk_allocation_ratio = 1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.036809] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] initial_ram_allocation_ratio = 1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.037056] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.037229] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] instance_build_timeout = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.037401] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] instance_delete_interval = 300 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.037608] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] instance_format = [instance: %(uuid)s] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.037779] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] instance_name_template = instance-%08x {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.037940] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] instance_usage_audit = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.038125] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] instance_usage_audit_period = month {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.038299] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.038467] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] instances_path = /opt/stack/data/nova/instances {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.038747] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] internal_service_availability_zone = internal {{(pid=62133) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.038915] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] key = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.039096] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] live_migration_retry_count = 30 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.039271] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] log_color = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.039439] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] log_config_append = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.039613] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.040139] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] log_dir = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.040139] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] log_file = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.040139] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] log_options = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.040301] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] log_rotate_interval = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.040391] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] log_rotate_interval_type = days {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.040550] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] log_rotation_type = none {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.040680] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.040804] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.040971] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.041146] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.041276] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.041440] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] long_rpc_timeout = 1800 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.041603] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] max_concurrent_builds = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.041763] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] max_concurrent_live_migrations = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.041921] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] max_concurrent_snapshots = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.042091] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] max_local_block_devices = 3 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.042253] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] max_logfile_count = 30 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.042411] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] max_logfile_size_mb = 200 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.042570] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] maximum_instance_delete_attempts = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.042737] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] metadata_listen = 0.0.0.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.042903] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] metadata_listen_port = 8775 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.043104] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] metadata_workers = 2 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.043279] env[62133]: DEBUG oslo_service.service 
[None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] migrate_max_retries = -1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.043450] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] mkisofs_cmd = genisoimage {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.043660] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] my_block_storage_ip = 10.180.1.21 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.043791] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] my_ip = 10.180.1.21 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.043954] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] network_allocate_retries = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.044147] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.044315] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] osapi_compute_listen = 0.0.0.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.044478] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] osapi_compute_listen_port = 8774 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.044643] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] osapi_compute_unique_server_name_scope = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.044810] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] osapi_compute_workers = 2 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.044976] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] password_length = 12 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.045149] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] periodic_enable = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.045311] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] periodic_fuzzy_delay = 60 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.045482] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] pointer_model = usbtablet {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.045649] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] preallocate_images = none {{(pid=62133) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.045826] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] publish_errors = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.045968] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] pybasedir = /opt/stack/nova {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.046148] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ram_allocation_ratio = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.046323] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] rate_limit_burst = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.046524] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] rate_limit_except_level = CRITICAL {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.046696] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] rate_limit_interval = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.046859] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] reboot_timeout = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.047027] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] reclaim_instance_interval = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.047204] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] record = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.047381] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] reimage_timeout_per_gb = 60 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.047586] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] report_interval = 120 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.047752] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] rescue_timeout = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.047916] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] reserved_host_cpus = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.048090] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] reserved_host_disk_mb = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.048254] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b 
None None] reserved_host_memory_mb = 512 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.048419] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] reserved_huge_pages = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.048583] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] resize_confirm_window = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.048849] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] resize_fs_using_block_device = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.049029] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] resume_guests_state_on_host_boot = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.049208] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.049376] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] rpc_response_timeout = 60 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.049540] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] run_external_periodic_tasks = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.049713] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] running_deleted_instance_action = reap {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.049879] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] running_deleted_instance_poll_interval = 1800 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.050050] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] running_deleted_instance_timeout = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.050215] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] scheduler_instance_sync_interval = 120 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.050386] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] service_down_time = 720 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.050559] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] servicegroup_driver = db {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.050720] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] shell_completion = None {{(pid=62133) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.050880] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] shelved_offload_time = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.051053] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] shelved_poll_interval = 3600 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.051225] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] shutdown_timeout = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.051390] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] source_is_ipv6 = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.051550] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ssl_only = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.051809] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.051979] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] sync_power_state_interval = 600 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.052156] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] sync_power_state_pool_size = 1000 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.052326] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] syslog_log_facility = LOG_USER {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.052486] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] tempdir = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.052647] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] timeout_nbd = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.052813] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] transport_url = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.052974] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] update_resources_interval = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.053145] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] use_cow_images = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.053305] env[62133]: DEBUG oslo_service.service [None 
req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] use_eventlog = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.053464] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] use_journal = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.053624] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] use_json = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.053781] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] use_rootwrap_daemon = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.053940] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] use_stderr = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.054112] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] use_syslog = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.054274] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vcpu_pin_set = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.054447] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plugging_is_fatal = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.054614] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plugging_timeout = 300 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.054781] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] virt_mkfs = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.054943] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] volume_usage_poll_interval = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.055119] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] watch_log_file = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.055290] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] web = /usr/share/spice-html5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 471.055476] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_concurrency.disable_process_locking = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.056198] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.056318] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.056526] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.056776] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.056974] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.057163] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.057352] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.auth_strategy = keystone {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.057542] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.compute_link_prefix = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.057741] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.057925] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.dhcp_domain = novalocal {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.058111] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.enable_instance_password = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.058286] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.glance_link_prefix = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.058456] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.058635] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.058910] env[62133]: 
DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.instance_list_per_project_cells = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.059133] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.list_records_by_skipping_down_cells = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.059313] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.local_metadata_per_cell = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.059491] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.max_limit = 1000 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.059668] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.metadata_cache_expiration = 15 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.059847] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.neutron_default_tenant_id = default {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.060029] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.response_validation = warn {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.060206] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.use_neutron_default_nets = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.060379] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.060546] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.060722] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.060899] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.061084] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.vendordata_dynamic_targets = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.061254] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.vendordata_jsonfile_path = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.061440] env[62133]: 
DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.061637] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.backend = dogpile.cache.memcached {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.061808] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.backend_argument = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.061983] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.config_prefix = cache.oslo {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.062171] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.dead_timeout = 60.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.062339] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.debug_cache_backend = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.062505] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.enable_retry_client = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.062667] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.enable_socket_keepalive = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.062839] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.enabled = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.063012] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.enforce_fips_mode = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.063183] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.expiration_time = 600 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.063347] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.hashclient_retry_attempts = 2 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.063514] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.hashclient_retry_delay = 1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.063679] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.memcache_dead_retry = 300 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.063839] env[62133]: DEBUG oslo_service.service [None 
req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.memcache_password = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.064010] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.064183] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.064347] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.memcache_pool_maxsize = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.064511] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.064673] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.memcache_sasl_enabled = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.064854] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.065030] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.memcache_socket_timeout = 1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.065196] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.memcache_username = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.065364] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.proxies = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.065532] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.redis_db = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.065694] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.redis_password = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.065865] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.redis_sentinel_service_name = mymaster {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.066055] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.066232] env[62133]: DEBUG oslo_service.service [None 
req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.redis_server = localhost:6379 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.066404] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.redis_socket_timeout = 1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.066594] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.redis_username = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.066766] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.retry_attempts = 2 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.066934] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.retry_delay = 0.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.067109] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.socket_keepalive_count = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.067275] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.socket_keepalive_idle = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.067442] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.socket_keepalive_interval = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.067625] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.tls_allowed_ciphers = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.067794] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.tls_cafile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.067956] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.tls_certfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.068131] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.tls_enabled = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.068291] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cache.tls_keyfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.068466] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.auth_section = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.068645] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.auth_type = password {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.068834] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.cafile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.069141] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.catalog_info = volumev3::publicURL {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.069318] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.certfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.069487] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.collect_timing = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.069656] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.cross_az_attach = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.069821] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.debug = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.069985] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.endpoint_template = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.070167] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.http_retries = 3 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.070333] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.insecure = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.070497] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.keyfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.070674] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.os_region_name = RegionOne {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.070841] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.split_loggers = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.071010] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cinder.timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.071192] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.071357] env[62133]: DEBUG 
oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute.cpu_dedicated_set = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.071521] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute.cpu_shared_set = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.071694] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute.image_type_exclude_list = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.071862] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.072037] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute.max_concurrent_disk_ops = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.072208] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute.max_disk_devices_to_attach = -1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.072375] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.072549] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.072718] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute.resource_provider_association_refresh = 300 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.072883] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.073059] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute.shutdown_retry_interval = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.073247] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.073430] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] conductor.workers = 2 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.073611] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] console.allowed_origins = [] {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.073773] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] console.ssl_ciphers = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.073945] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] console.ssl_minimum_version = default {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.074130] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] consoleauth.enforce_session_timeout = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.074304] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] consoleauth.token_ttl = 600 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.074476] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.cafile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.074638] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.certfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.074801] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.collect_timing = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.074960] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.connect_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.075133] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.connect_retry_delay = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.075293] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.endpoint_override = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.075459] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.insecure = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.075618] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.keyfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.075777] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.max_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.075937] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.min_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.076107] env[62133]: DEBUG 
oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.region_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.076269] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.retriable_status_codes = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.076454] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.service_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.076633] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.service_type = accelerator {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.076800] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.split_loggers = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.076960] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.status_code_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.077133] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.status_code_retry_delay = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.077294] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.077478] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.077668] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] cyborg.version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.077857] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.backend = sqlalchemy {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.078041] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.connection = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.078213] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.connection_debug = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.078385] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.connection_parameters = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.078553] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] 
database.connection_recycle_time = 3600 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.078725] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.connection_trace = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.078911] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.db_inc_retry_interval = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.079196] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.db_max_retries = 20 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.079376] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.db_max_retry_interval = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.079544] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.db_retry_interval = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.079709] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.max_overflow = 50 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.079873] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.max_pool_size = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.080046] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.max_retries = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.080223] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.080387] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.mysql_wsrep_sync_wait = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.080548] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.pool_timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.080713] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.retry_interval = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.080873] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.slave_connection = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.081045] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.sqlite_synchronous = True {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.081213] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] database.use_db_reconnect = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.081393] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.backend = sqlalchemy {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.081562] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.connection = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.081729] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.connection_debug = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.081898] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.connection_parameters = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.082075] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.connection_recycle_time = 3600 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.082243] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.connection_trace = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.082406] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.db_inc_retry_interval = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.082570] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.db_max_retries = 20 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.082772] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.db_max_retry_interval = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.082892] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.db_retry_interval = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.083063] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.max_overflow = 50 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.083226] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.max_pool_size = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.083388] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.max_retries = 10 {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.083557] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.083719] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.083879] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.pool_timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.084049] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.retry_interval = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.084212] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.slave_connection = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.084372] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] api_database.sqlite_synchronous = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.084550] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] devices.enabled_mdev_types = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.084730] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.084903] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ephemeral_storage_encryption.default_format = luks {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.085080] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ephemeral_storage_encryption.enabled = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.085251] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.085423] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.api_servers = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.085591] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.cafile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.085756] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.certfile = None {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.085920] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.collect_timing = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.086094] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.connect_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.086260] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.connect_retry_delay = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.086436] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.debug = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.086629] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.default_trusted_certificate_ids = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.086794] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.enable_certificate_validation = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.086960] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.enable_rbd_download = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.087134] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.endpoint_override = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.087305] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.insecure = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.087472] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.keyfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.087658] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.max_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.087824] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.min_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.087990] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.num_retries = 3 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.088175] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.rbd_ceph_conf = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.088343] env[62133]: DEBUG 
oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.rbd_connect_timeout = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.088515] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.rbd_pool = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.088685] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.rbd_user = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.088872] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.region_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.089052] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.retriable_status_codes = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.089312] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.service_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.089494] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.service_type = image {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.089664] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.split_loggers = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.089829] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.status_code_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.089988] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.status_code_retry_delay = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.090166] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.090357] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.090526] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.verify_glance_signatures = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.090688] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] glance.version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.090858] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] guestfs.debug = False 
{{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.091036] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] mks.enabled = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.091400] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.091595] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] image_cache.manager_interval = 2400 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.091768] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] image_cache.precache_concurrency = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.091943] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] image_cache.remove_unused_base_images = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.092130] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.092309] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.092488] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] image_cache.subdirectory_name = _base {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.092671] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.api_max_retries = 60 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.092840] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.api_retry_interval = 2 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.093009] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.auth_section = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.093182] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.auth_type = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.093343] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.cafile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.093505] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.certfile = None {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.093669] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.collect_timing = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.093835] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.conductor_group = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.093996] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.connect_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.094172] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.connect_retry_delay = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.094333] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.endpoint_override = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.094498] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.insecure = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.094661] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.keyfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.094820] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.max_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.094978] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.min_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.095225] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.peer_list = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.095416] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.region_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.095582] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.retriable_status_codes = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.095750] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.serial_console_state_timeout = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.095912] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.service_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.096095] env[62133]: DEBUG 
oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.service_type = baremetal {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.096261] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.shard = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.096437] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.split_loggers = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.096609] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.status_code_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.096773] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.status_code_retry_delay = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.096933] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.097128] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.097294] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ironic.version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.097480] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.097654] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] key_manager.fixed_key = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.097834] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.097997] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.barbican_api_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.098175] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.barbican_endpoint = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.098348] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.barbican_endpoint_type = public {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.098507] env[62133]: DEBUG oslo_service.service [None 
req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.barbican_region_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.098666] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.cafile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.098849] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.certfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.099031] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.collect_timing = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.099274] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.insecure = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.099463] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.keyfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.099636] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.number_of_retries = 60 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.099803] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.retry_delay = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.099969] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.send_service_user_token = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.100149] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.split_loggers = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.100314] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.100485] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.verify_ssl = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.100642] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican.verify_ssl_path = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.100810] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican_service_user.auth_section = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.100975] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican_service_user.auth_type = None {{(pid=62133) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.101149] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican_service_user.cafile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.101311] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican_service_user.certfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.101475] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican_service_user.collect_timing = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.101648] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican_service_user.insecure = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.101819] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican_service_user.keyfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.101984] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican_service_user.split_loggers = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.102158] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] barbican_service_user.timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.102328] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vault.approle_role_id = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.102489] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vault.approle_secret_id = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.102661] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vault.kv_mountpoint = secret {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.102830] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vault.kv_path = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.102990] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vault.kv_version = 2 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.103165] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vault.namespace = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.103327] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vault.root_token_id = **** {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.103488] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vault.ssl_ca_crt_file = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.103657] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vault.timeout = 60.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.103897] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vault.use_ssl = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.103988] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.104172] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.auth_section = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.104356] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.auth_type = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.104521] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.cafile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.104686] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.certfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.104851] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.collect_timing = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.105015] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.connect_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.105182] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.connect_retry_delay = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.105342] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.endpoint_override = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.105507] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.insecure = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.105667] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.keyfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.105825] env[62133]: DEBUG oslo_service.service 
[None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.max_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.105979] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.min_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.106151] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.region_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.106312] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.retriable_status_codes = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.106504] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.service_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.106688] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.service_type = identity {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.106853] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.split_loggers = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.107022] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.status_code_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.107182] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.status_code_retry_delay = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.107340] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.107536] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.107796] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] keystone.version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.107918] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.connection_uri = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.108097] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.cpu_mode = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.108275] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.cpu_model_extra_flags = 
[] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.108448] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.cpu_models = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.108624] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.cpu_power_governor_high = performance {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.108818] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.cpu_power_governor_low = powersave {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.109007] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.cpu_power_management = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.109198] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.109453] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.device_detach_attempts = 8 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.109642] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.device_detach_timeout = 20 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.109815] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.disk_cachemodes = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.109981] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.disk_prefix = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.110164] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.enabled_perf_events = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.110331] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.file_backed_memory = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.110500] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.gid_maps = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.110664] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.hw_disk_discard = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.110824] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.hw_machine_type = None {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.110994] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.images_rbd_ceph_conf = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.111173] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.111339] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.111513] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.images_rbd_glance_store_name = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.111691] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.images_rbd_pool = rbd {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.111864] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.images_type = default {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.112034] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.images_volume_group = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.112204] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.inject_key = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.112370] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.inject_partition = -2 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.112534] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.inject_password = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.112719] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.iscsi_iface = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.112936] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.iser_use_multipath = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.113127] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.live_migration_bandwidth = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.113300] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.113469] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.live_migration_downtime = 500 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.113638] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.113802] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.113964] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.live_migration_inbound_addr = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.114142] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.114306] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.live_migration_permit_post_copy = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.114468] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.live_migration_scheme = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.114643] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.live_migration_timeout_action = abort {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.114811] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.live_migration_tunnelled = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.114976] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.live_migration_uri = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.115157] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.live_migration_with_native_tls = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.115324] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.max_queues = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.115490] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.115736] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] 
libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.115917] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.nfs_mount_options = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.116250] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.116449] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.116629] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.num_iser_scan_tries = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.116798] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.num_memory_encrypted_guests = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.116968] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.117152] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.num_pcie_ports = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.117325] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.num_volume_scan_tries = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.117502] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.pmem_namespaces = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.117689] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.quobyte_client_cfg = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.117979] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.118168] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.rbd_connect_timeout = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.118339] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.118504] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b 
None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.118670] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.rbd_secret_uuid = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.118854] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.rbd_user = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.119039] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.119218] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.remote_filesystem_transport = ssh {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.119383] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.rescue_image_id = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.119635] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.rescue_kernel_id = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.119807] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.rescue_ramdisk_id = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.119981] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.120158] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.rx_queue_size = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.120331] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.smbfs_mount_options = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.120606] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.120783] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.snapshot_compression = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.120946] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.snapshot_image_format = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.121180] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] 
libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.121351] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.sparse_logical_volumes = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.121517] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.swtpm_enabled = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.121693] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.swtpm_group = tss {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.121860] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.swtpm_user = tss {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.122043] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.sysinfo_serial = unique {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.122211] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.tb_cache_size = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.122374] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.tx_queue_size = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.122543] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.uid_maps = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.122709] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.use_virtio_for_bridges = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.122882] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.virt_type = kvm {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.123064] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.volume_clear = zero {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.123233] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.volume_clear_size = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.123403] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.volume_use_multipath = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.123565] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.vzstorage_cache_path = None {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.123738] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.123907] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.vzstorage_mount_group = qemu {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.124084] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.vzstorage_mount_opts = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.124259] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.124540] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.124720] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.vzstorage_mount_user = stack {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.124889] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.125084] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.auth_section = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.125264] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.auth_type = password {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.125430] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.cafile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.125595] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.certfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.125764] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.collect_timing = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.125926] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.connect_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.126097] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.connect_retry_delay = None {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.126271] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.default_floating_pool = public {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.126451] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.endpoint_override = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.126635] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.extension_sync_interval = 600 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.126807] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.http_retries = 3 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.126973] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.insecure = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.127150] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.keyfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.127311] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.max_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.127484] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.127666] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.min_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.127842] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.ovs_bridge = br-int {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.128017] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.physnets = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.128192] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.region_name = RegionOne {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.128354] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.retriable_status_codes = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.128551] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.service_metadata_proxy = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.128933] 
env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.service_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.128933] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.service_type = network {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.129103] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.split_loggers = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.129268] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.status_code_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.129430] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.status_code_retry_delay = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.129609] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.129791] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.129955] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] neutron.version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.130142] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] notifications.bdms_in_notifications = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.130322] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] notifications.default_level = INFO {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.130497] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] notifications.notification_format = unversioned {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.130666] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] notifications.notify_on_state_change = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.130841] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.131032] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] pci.alias = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.131210] 
env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] pci.device_spec = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.131379] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] pci.report_in_placement = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.131554] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.auth_section = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.131733] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.auth_type = password {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.131902] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.132079] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.cafile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.132244] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.certfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.132410] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.collect_timing = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.132573] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.connect_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.132735] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.connect_retry_delay = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.132896] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.default_domain_id = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.133071] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.default_domain_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.133235] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.domain_id = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.133395] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.domain_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.133557] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b 
None None] placement.endpoint_override = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.133722] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.insecure = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.133881] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.keyfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.134050] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.max_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.134214] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.min_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.134384] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.password = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.134547] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.project_domain_id = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.134716] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.project_domain_name = Default {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.134886] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.project_id = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.135070] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.project_name = service {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.135242] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.region_name = RegionOne {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.135408] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.retriable_status_codes = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.135571] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.service_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.135743] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.service_type = placement {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.135908] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.split_loggers = False {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.136078] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.status_code_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.136246] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.status_code_retry_delay = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.136428] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.system_scope = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.136592] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.136758] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.trust_id = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.136932] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.user_domain_id = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.137134] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.user_domain_name = Default {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.137303] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.user_id = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.137481] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.username = nova {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.137690] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.137859] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] placement.version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.138050] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] quota.cores = 20 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.138221] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] quota.count_usage_from_placement = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.138396] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
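[editor's note] The DEBUG records above and below are oslo.config's standard startup dump of every effective option value, emitted once by the service via ConfigOpts.log_opt_values() (the cfg.py:2826 helper referenced in each record), with secret options such as placement.password masked as '****'. What follows is a minimal sketch, not Nova's actual startup code: the 'demo' group, its options, and the 'demo_service' name are illustrative only.

# Minimal sketch (assumed example, not Nova's code) of how a service produces
# the "group.option = value" DEBUG dump seen in these records, using
# oslo.config's ConfigOpts.log_opt_values().
import logging

from oslo_config import cfg

CONF = cfg.CONF

# Register a small illustrative option group; Nova registers many groups
# (libvirt, neutron, placement, quota, filter_scheduler, ...) before the dump.
CONF.register_opts(
    [
        cfg.StrOpt('region_name', default='RegionOne'),
        cfg.IntOpt('http_retries', default=3),
        cfg.StrOpt('password', secret=True, default='s3cr3t'),  # secret=True -> dumped as '****'
    ],
    group='demo',
)

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger('demo_service')

CONF([], project='demo_service')         # parse (empty) CLI/config sources
CONF.log_opt_values(LOG, logging.DEBUG)  # logs e.g. 'demo.region_name = RegionOne'

Run as-is, this prints one "group.option = value" line per registered option at DEBUG level, mirroring the format of the surrounding records. [end note]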
[ 471.138572] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] quota.injected_file_content_bytes = 10240 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.138743] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] quota.injected_file_path_length = 255 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.138909] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] quota.injected_files = 5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.139089] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] quota.instances = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.139261] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] quota.key_pairs = 100 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.139429] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] quota.metadata_items = 128 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.139601] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] quota.ram = 51200 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.139761] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] quota.recheck_quota = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.139929] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] quota.server_group_members = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.140107] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] quota.server_groups = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.140289] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.140458] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.140622] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] scheduler.image_metadata_prefilter = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.140788] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.140954] env[62133]: DEBUG oslo_service.service 
[None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] scheduler.max_attempts = 3 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.141132] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] scheduler.max_placement_results = 1000 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.141298] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.141463] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] scheduler.query_placement_for_image_type_support = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.141627] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.141801] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] scheduler.workers = 2 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.141975] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.142160] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.142340] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.142511] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.142679] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.142844] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.143023] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.143213] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.143393] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.host_subset_size = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.143563] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.143725] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.143888] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.144068] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.isolated_hosts = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.144245] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.isolated_images = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.144415] env[62133]: DEBUG oslo_service.service [None 
req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.144581] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.144747] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.144911] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.pci_in_placement = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.145086] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.145251] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.145417] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.145580] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.145743] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.145904] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.146078] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.track_instance_changes = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.146259] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.146455] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] metrics.required = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.146631] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] metrics.weight_multiplier = 1.0 
{{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.146799] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.146966] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] metrics.weight_setting = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.147296] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.147477] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] serial_console.enabled = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.147690] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] serial_console.port_range = 10000:20000 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.147870] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.148055] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.148232] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] serial_console.serialproxy_port = 6083 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.148404] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] service_user.auth_section = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.148581] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] service_user.auth_type = password {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.148758] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] service_user.cafile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.148939] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] service_user.certfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.149121] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] service_user.collect_timing = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.149290] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] service_user.insecure = False {{(pid=62133) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.149452] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] service_user.keyfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.149628] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] service_user.send_service_user_token = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.149795] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] service_user.split_loggers = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.149956] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] service_user.timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.150141] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] spice.agent_enabled = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.150309] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] spice.enabled = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.150629] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.150826] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.150996] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] spice.html5proxy_port = 6082 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.151176] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] spice.image_compression = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.151338] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] spice.jpeg_compression = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.151501] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] spice.playback_compression = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.151667] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] spice.require_secure = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.151840] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] spice.server_listen = 127.0.0.1 {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.152016] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.152181] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] spice.streaming_mode = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.152342] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] spice.zlib_compression = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.152512] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] upgrade_levels.baseapi = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.152682] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] upgrade_levels.compute = auto {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.152844] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] upgrade_levels.conductor = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.153010] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] upgrade_levels.scheduler = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.153187] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vendordata_dynamic_auth.auth_section = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.153353] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vendordata_dynamic_auth.auth_type = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.153513] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vendordata_dynamic_auth.cafile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.153675] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vendordata_dynamic_auth.certfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.153837] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.154007] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vendordata_dynamic_auth.insecure = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.154174] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vendordata_dynamic_auth.keyfile = None {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.154337] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.154497] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vendordata_dynamic_auth.timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.154671] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.api_retry_count = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.154831] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.ca_file = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.155012] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.cache_prefix = devstack-image-cache {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.155179] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.cluster_name = testcl1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.155343] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.connection_pool_size = 10 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.155503] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.console_delay_seconds = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.155672] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.datastore_regex = ^datastore.* {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.155883] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.156067] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.host_password = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.156237] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.host_port = 443 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.156432] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.host_username = administrator@vsphere.local {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.156605] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.insecure = True {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.156776] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.integration_bridge = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.156943] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.maximum_objects = 100 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.157118] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.pbm_default_policy = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.157286] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.pbm_enabled = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.157448] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.pbm_wsdl_location = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.157641] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.157811] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.serial_port_proxy_uri = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.157973] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.serial_port_service_uri = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.158154] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.task_poll_interval = 0.5 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.158330] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.use_linked_clone = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.158502] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.vnc_keymap = en-us {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.158672] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.vnc_port = 5900 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.158859] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vmware.vnc_port_total = 10000 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.159061] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vnc.auth_schemes = ['none'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.159243] 
env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vnc.enabled = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.159542] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.159728] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.159901] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vnc.novncproxy_port = 6080 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.160088] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vnc.server_listen = 127.0.0.1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.160264] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.160426] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vnc.vencrypt_ca_certs = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.160587] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vnc.vencrypt_client_cert = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.160745] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vnc.vencrypt_client_key = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.160927] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.161104] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.disable_deep_image_inspection = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.161271] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.161435] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.161601] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.161760] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.disable_rootwrap = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.161919] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.enable_numa_live_migration = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.162090] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.162257] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.162418] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.162583] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.libvirt_disable_apic = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.162745] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.162911] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.163086] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.163253] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.163418] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.163585] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.163748] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.163910] 
env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.164082] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.164254] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.164437] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.164611] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] wsgi.client_socket_timeout = 900 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.164777] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] wsgi.default_pool_size = 1000 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.164942] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] wsgi.keep_alive = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.165119] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] wsgi.max_header_line = 16384 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.165284] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] wsgi.secure_proxy_ssl_header = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.165446] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] wsgi.ssl_ca_file = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.165606] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] wsgi.ssl_cert_file = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.165765] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] wsgi.ssl_key_file = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.165936] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] wsgi.tcp_keepidle = 600 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.166124] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.166294] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] zvm.ca_file = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.166484] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] zvm.cloud_connector_url = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.166783] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.166959] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] zvm.reachable_timeout = 300 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.167157] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_policy.enforce_new_defaults = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.167568] env[62133]: WARNING oslo_config.cfg [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
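The DEBUG entries above and below are emitted by oslo.config's log_opt_values(), which the nova-compute service calls once at startup to record every registered option (secret values such as vmware.host_password are masked as ****). A minimal sketch of that mechanism, not Nova's actual startup code, assuming only two hypothetical [scheduler] options instead of Nova's full option set:

import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

# Hypothetical subset of the [scheduler] options seen in this log;
# the real service registers hundreds of options across many groups.
opts = [
    cfg.IntOpt('max_attempts', default=3),
    cfg.IntOpt('max_placement_results', default=1000),
]
cfg.CONF.register_opts(opts, group='scheduler')

# Parse an empty command line / config set, then dump every registered
# option at DEBUG level; this is what produces the
# "scheduler.max_attempts = 3"-style lines in this log.
cfg.CONF([], project='nova')
cfg.CONF.log_opt_values(LOG, logging.DEBUG)

Each registered option comes out as one "group.option = value" DEBUG line, in the same shape as the surrounding entries.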
[ 471.167767] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_policy.enforce_scope = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.167952] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_policy.policy_default_rule = default {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.168153] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.168334] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_policy.policy_file = policy.yaml {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.168514] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.168678] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.168870] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.169051] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.169222] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.169391] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.169569] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.169751] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler.connection_string = messaging:// {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.169929] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler.enabled = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.170107] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler.es_doc_type = notification 
{{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.170276] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler.es_scroll_size = 10000 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.170448] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler.es_scroll_time = 2m {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.170613] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler.filter_error_trace = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.170783] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler.hmac_keys = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.170950] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler.sentinel_service_name = mymaster {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.171131] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler.socket_timeout = 0.1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.171298] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler.trace_requests = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.171461] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler.trace_sqlalchemy = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.171645] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler_jaeger.process_tags = {} {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.171809] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler_jaeger.service_name_prefix = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.171973] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] profiler_otlp.service_name_prefix = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.172152] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] remote_debug.host = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.172310] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] remote_debug.port = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.172489] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.172653] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.172816] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.172978] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.173152] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.173314] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.173477] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.173639] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.173801] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.173971] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.174145] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.174316] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.174485] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.174654] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.174827] 
env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.175022] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.175165] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.175338] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.175504] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.175667] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.175835] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.175998] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.176172] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.176338] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.176536] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.176706] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.176870] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.177041] env[62133]: DEBUG oslo_service.service [None 
req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.177217] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.177383] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.ssl = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.177582] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.177772] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.177939] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.178124] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.178298] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.ssl_version = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.178462] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.178650] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.178842] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_notifications.retry = -1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.179056] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.179241] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_messaging_notifications.transport_url = **** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.179415] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.auth_section = None {{(pid=62133) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.179585] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.auth_type = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.179750] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.cafile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.179912] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.certfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.180089] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.collect_timing = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.180255] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.connect_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.180416] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.connect_retry_delay = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.180575] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.endpoint_id = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.180733] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.endpoint_override = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.180895] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.insecure = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.181063] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.keyfile = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.181225] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.max_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.181383] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.min_version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.181541] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.region_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.181748] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.retriable_status_codes = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.181936] 
env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.service_name = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.182111] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.service_type = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.182278] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.split_loggers = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.182439] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.status_code_retries = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.182600] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.status_code_retry_delay = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.182760] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.timeout = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.182918] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.valid_interfaces = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.183094] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_limit.version = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.183268] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_reports.file_event_handler = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.183440] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.183601] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] oslo_reports.log_dir = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.183772] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.183932] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.184104] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.184287] 
env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.184445] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.184607] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.184779] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.184941] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plug_ovs_privileged.group = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.185117] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.185286] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.185448] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.185609] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] vif_plug_ovs_privileged.user = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.185778] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_linux_bridge.flat_interface = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.185959] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.186146] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.186321] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.186521] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.186698] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.186866] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.187041] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.187226] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.187403] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_ovs.isolate_vif = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.187599] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.187776] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.187950] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.188135] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_ovs.ovsdb_interface = native {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.188301] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_vif_ovs.per_port_bridge = False {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.188473] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_brick.lock_path = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.188640] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.188829] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.189017] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None 
None] privsep_osbrick.capabilities = [21] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.189188] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] privsep_osbrick.group = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.189350] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] privsep_osbrick.helper_command = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.189516] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.189681] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.189842] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] privsep_osbrick.user = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.190026] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.190193] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] nova_sys_admin.group = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.190353] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] nova_sys_admin.helper_command = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.190518] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.190683] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.190842] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] nova_sys_admin.user = None {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 471.190975] env[62133]: DEBUG oslo_service.service [None req-c2454a36-9cd5-4111-82b1-6f20b2bd493b None None] ******************************************************************************** {{(pid=62133) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 471.191472] env[62133]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 471.694624] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Getting list of instances from cluster (obj){ [ 471.694624] env[62133]: value = "domain-c8" [ 471.694624] env[62133]: 
_type = "ClusterComputeResource" [ 471.694624] env[62133]: } {{(pid=62133) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 471.695775] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4c0291-97e0-4f12-998c-37bd0c0dafc7 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 471.704059] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Got total of 0 instances {{(pid=62133) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 471.704593] env[62133]: WARNING nova.virt.vmwareapi.driver [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 471.705056] env[62133]: INFO nova.virt.node [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Generated node identity 2bb88767-ae4a-4310-888a-696a7895e29b [ 471.705285] env[62133]: INFO nova.virt.node [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Wrote node identity 2bb88767-ae4a-4310-888a-696a7895e29b to /opt/stack/data/n-cpu-1/compute_id [ 472.207956] env[62133]: WARNING nova.compute.manager [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Compute nodes ['2bb88767-ae4a-4310-888a-696a7895e29b'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 473.213048] env[62133]: INFO nova.compute.manager [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 474.218315] env[62133]: WARNING nova.compute.manager [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 474.218726] env[62133]: DEBUG oslo_concurrency.lockutils [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 474.218788] env[62133]: DEBUG oslo_concurrency.lockutils [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 474.218926] env[62133]: DEBUG oslo_concurrency.lockutils [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 474.219099] env[62133]: DEBUG nova.compute.resource_tracker [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62133) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 474.220027] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49919b1d-9d5f-46bc-b74c-6cdd421ee373 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 474.228578] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc76bcda-9b26-4544-a27f-c957dec78a88 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 474.242544] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1f90f0-5da5-42ec-9568-6fcc0a51c3a6 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 474.248820] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23a80e78-39fb-48f7-b9df-2b60db192074 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 474.278364] env[62133]: DEBUG nova.compute.resource_tracker [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181670MB free_disk=61GB free_vcpus=48 pci_devices=None {{(pid=62133) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 474.278528] env[62133]: DEBUG oslo_concurrency.lockutils [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 474.278689] env[62133]: DEBUG oslo_concurrency.lockutils [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 474.783013] env[62133]: WARNING 
nova.compute.resource_tracker [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] No compute node record for cpu-1:2bb88767-ae4a-4310-888a-696a7895e29b: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 2bb88767-ae4a-4310-888a-696a7895e29b could not be found. [ 475.286905] env[62133]: INFO nova.compute.resource_tracker [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 2bb88767-ae4a-4310-888a-696a7895e29b [ 476.794822] env[62133]: DEBUG nova.compute.resource_tracker [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62133) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 476.795193] env[62133]: DEBUG nova.compute.resource_tracker [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62133) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 476.944617] env[62133]: INFO nova.scheduler.client.report [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] [req-c9446fda-6f30-410c-a046-2347531d0ae7] Created resource provider record via placement API for resource provider with UUID 2bb88767-ae4a-4310-888a-696a7895e29b and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 476.960357] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc3e7c6-d875-4392-a62b-71f9c36dc399 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 476.967956] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-badea2e3-7b05-4554-a2c4-4560c1bdae76 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 476.996973] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79c64a9-4585-45d3-9ec7-23b522e9da79 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 477.003767] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e47a95-40bc-440e-ad38-048b2504fcdb {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 477.016302] env[62133]: DEBUG nova.compute.provider_tree [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 477.551158] env[62133]: DEBUG nova.scheduler.client.report [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Updated inventory for provider 2bb88767-ae4a-4310-888a-696a7895e29b with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 477.551389] env[62133]: DEBUG nova.compute.provider_tree [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Updating resource provider 2bb88767-ae4a-4310-888a-696a7895e29b generation from 0 to 1 during operation: update_inventory {{(pid=62133) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 477.551530] env[62133]: DEBUG nova.compute.provider_tree [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 477.601580] env[62133]: DEBUG nova.compute.provider_tree [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Updating resource provider 2bb88767-ae4a-4310-888a-696a7895e29b generation from 1 to 2 during operation: update_traits {{(pid=62133) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 478.106379] env[62133]: DEBUG nova.compute.resource_tracker [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62133) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 478.106750] env[62133]: DEBUG oslo_concurrency.lockutils [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.828s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 478.106750] env[62133]: DEBUG nova.service [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Creating RPC server for service compute {{(pid=62133) start /opt/stack/nova/nova/service.py:186}} [ 478.122082] env[62133]: DEBUG nova.service [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] Join ServiceGroup membership for this service compute {{(pid=62133) start /opt/stack/nova/nova/service.py:203}} [ 478.122302] env[62133]: DEBUG nova.servicegroup.drivers.db [None req-915b1175-1198-4ebc-b1a6-3cb87f95c772 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62133) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 506.011441] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquiring lock "6174840e-10a6-43ed-be9c-76602f91bf42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 506.011756] env[62133]: DEBUG 
oslo_concurrency.lockutils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Lock "6174840e-10a6-43ed-be9c-76602f91bf42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 506.514654] env[62133]: DEBUG nova.compute.manager [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Starting instance... {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 507.061030] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 507.061030] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 507.061738] env[62133]: INFO nova.compute.claims [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 508.111916] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653c3038-6b5f-40f2-b72f-a87054822d01 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.124575] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afdc01f-4f20-4121-a7db-2892984fd16d {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.160504] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8910a6f-74a4-4211-a8f1-8dc88a526b11 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.168924] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a47bdf4c-6c9d-4921-b509-7502ae17c545 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 508.185828] env[62133]: DEBUG nova.compute.provider_tree [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 508.691031] env[62133]: DEBUG nova.scheduler.client.report [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 
tempest-ServersAdmin275Test-1220984892-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 509.196475] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.136s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 509.196948] env[62133]: DEBUG nova.compute.manager [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Start building networks asynchronously for instance. {{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 509.703113] env[62133]: DEBUG nova.compute.utils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Using /dev/sd instead of None {{(pid=62133) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 509.708628] env[62133]: DEBUG nova.compute.manager [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Not allocating networking since 'none' was specified. {{(pid=62133) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 510.206724] env[62133]: DEBUG nova.compute.manager [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Start building block device mappings for instance. {{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 511.224259] env[62133]: DEBUG nova.compute.manager [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Start spawning the instance on the hypervisor. 
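The inventory dictionaries reported to Placement above translate into schedulable capacity roughly as `(total - reserved) * allocation_ratio`, with `max_unit` capping what any single allocation may request. A small sketch using the exact numbers from this log; the helper is illustrative, not Placement's implementation.

```python
# Effective capacity implied by the inventory reported above for provider
# 2bb88767-ae4a-4310-888a-696a7895e29b.  Illustrative helper only.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'min_unit': 1, 'max_unit': 16,    'step_size': 1},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 61,    'step_size': 1},
}

def capacity(inv: dict) -> int:
    """Schedulable amount for one resource class."""
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

for rc, inv in inventory.items():
    print(f"{rc}: {capacity(inv)} schedulable "
          f"(at most {inv['max_unit']} per allocation)")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400 -- which is why the 1-vCPU,
# 192 MB m1.nano claims later in the log succeed trivially.
```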
{{(pid=62133) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 511.892478] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Acquiring lock "801672ea-1218-4c0a-9aae-d94ff1344731" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 511.892478] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Lock "801672ea-1218-4c0a-9aae-d94ff1344731" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 512.397087] env[62133]: DEBUG nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Starting instance... {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 512.814939] env[62133]: DEBUG nova.virt.hardware [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 512.815634] env[62133]: DEBUG nova.virt.hardware [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 512.815634] env[62133]: DEBUG nova.virt.hardware [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 512.815634] env[62133]: DEBUG nova.virt.hardware [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 512.815857] env[62133]: DEBUG nova.virt.hardware [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 
tempest-ServersAdmin275Test-1220984892-project-member] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 512.815857] env[62133]: DEBUG nova.virt.hardware [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 512.815996] env[62133]: DEBUG nova.virt.hardware [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 512.816381] env[62133]: DEBUG nova.virt.hardware [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 512.817074] env[62133]: DEBUG nova.virt.hardware [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 512.817074] env[62133]: DEBUG nova.virt.hardware [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 512.817394] env[62133]: DEBUG nova.virt.hardware [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 512.818062] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4309723d-8374-4cc5-8495-0ede8585da00 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.830643] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a08222d-982c-4740-8fbc-367b6fbe71c4 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.852262] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3482ba-787a-40a7-b693-9d6ee31fc47f {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.871041] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Instance VIF info [] {{(pid=62133) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 512.881055] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 
tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62133) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 512.881352] env[62133]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8a6efde-24d9-4d56-8f0b-19ce466bcfe0 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.893512] env[62133]: INFO nova.virt.vmwareapi.vm_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Created folder: OpenStack in parent group-v4. [ 512.894229] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Creating folder: Project (799e8cd916964f1fb31f4fef772eff48). Parent ref: group-v192832. {{(pid=62133) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 512.894229] env[62133]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37cba48f-db25-48d7-85c6-e1752857d43a {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.907327] env[62133]: INFO nova.virt.vmwareapi.vm_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Created folder: Project (799e8cd916964f1fb31f4fef772eff48) in parent group-v192832. [ 512.907327] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Creating folder: Instances. Parent ref: group-v192833. {{(pid=62133) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 512.907497] env[62133]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70290f95-c7e4-4d88-96b4-31b2f2b2ab46 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.917510] env[62133]: INFO nova.virt.vmwareapi.vm_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Created folder: Instances in parent group-v192833. [ 512.917818] env[62133]: DEBUG oslo.service.loopingcall [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 512.918102] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Creating VM on the ESX host {{(pid=62133) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 512.918253] env[62133]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ad5ee74-18dd-4f39-ab54-7e97bbb5bbf4 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.934023] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 512.934023] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 512.935129] env[62133]: INFO nova.compute.claims [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 512.945318] env[62133]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 512.945318] env[62133]: value = "task-835115" [ 512.945318] env[62133]: _type = "Task" [ 512.945318] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 512.953667] env[62133]: DEBUG oslo_vmware.api [-] Task: {'id': task-835115, 'name': CreateVM_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 513.367987] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Acquiring lock "1225f605-2c40-4628-aefe-93359b077b89" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 513.368296] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Lock "1225f605-2c40-4628-aefe-93359b077b89" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 513.459227] env[62133]: DEBUG oslo_vmware.api [-] Task: {'id': task-835115, 'name': CreateVM_Task, 'duration_secs': 0.346804} completed successfully. 
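The `task-835115` records above show the usual oslo.vmware pattern: a vSphere `*_Task` method is invoked through the API session, and the caller then polls the returned task until vCenter reports completion (the "progress is 0%" and "completed successfully" lines). A rough sketch of that pattern against an already-created session; `session`, `folder_ref`, `config_spec` and `pool_ref` are assumed to exist, and the function is illustrative, not Nova's `vm_util` code.

```python
# Sketch of the invoke-then-poll pattern behind the CreateVM_Task records
# above.  Assumes an already-established oslo.vmware VMwareAPISession in
# `session`, plus a Folder moref, a VirtualMachineConfigSpec and a resource
# pool moref prepared elsewhere; the names are placeholders, not Nova's.

def create_vm(session, folder_ref, config_spec, pool_ref):
    # Kick off the asynchronous vSphere operation; returns a Task moref.
    task = session.invoke_api(session.vim, 'CreateVM_Task',
                              folder_ref, config=config_spec, pool=pool_ref)
    # wait_for_task() polls the task state (the "progress is N%" debug lines
    # above) and raises an oslo.vmware exception if the task fails.
    task_info = session.wait_for_task(task)
    # On success, the TaskInfo result is the new VirtualMachine reference.
    return task_info.result
```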
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 513.461557] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Created VM on the ESX host {{(pid=62133) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 513.462675] env[62133]: DEBUG oslo_vmware.service [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9f0ef6-f613-4491-91c9-71409143cf7b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.469508] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 513.469677] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 513.470335] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 513.471071] env[62133]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-460c320e-b92a-4cb0-8957-b6fb1ef9e5b8 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 513.475293] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 513.475293] env[62133]: value = "session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52aff4a5-8ff3-7162-71ec-0e468f623868" [ 513.475293] env[62133]: _type = "Task" [ 513.475293] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 513.482797] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52aff4a5-8ff3-7162-71ec-0e468f623868, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 513.871222] env[62133]: DEBUG nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Starting instance... 
{{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 513.996752] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 513.997077] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Processing image 0d8cd420-ac77-4102-b0d0-bf339970e2ce {{(pid=62133) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 513.997418] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 513.997917] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 513.998513] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62133) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 513.999201] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8aa2d4b2-7b1d-465f-bbf0-daf5bcb2fd81 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.019430] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62133) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 514.019430] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62133) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 514.019825] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c90d72a-8b95-4a0c-94a9-205dc21adf4f {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.028225] env[62133]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-490ca336-1334-48ff-a86c-e0d8733ee489 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.031350] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976a4a64-b63b-49bf-b15c-1f358c8f80fb {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.039261] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 514.039261] env[62133]: value = "session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52715cda-8c7e-0a53-99b0-fde547b4edd5" [ 514.039261] env[62133]: _type = "Task" [ 514.039261] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 514.049983] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52715cda-8c7e-0a53-99b0-fde547b4edd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 514.055434] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2db7275-919e-4012-9365-f4cc936b9657 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.086121] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80abf327-0360-4416-bc7b-f1e66feaf9b9 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.093361] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7429c4e5-ff18-445b-890a-83c22cc37ab0 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.106574] env[62133]: DEBUG nova.compute.provider_tree [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 514.404494] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 514.559226] env[62133]: DEBUG nova.virt.vmwareapi.vmops 
[None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Preparing fetch location {{(pid=62133) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 514.560851] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Creating directory with path [datastore2] vmware_temp/7352339a-d77e-4900-b952-c24111719e0a/0d8cd420-ac77-4102-b0d0-bf339970e2ce {{(pid=62133) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 514.561362] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6bc44e4-73a5-4426-bd48-a83f1792dc85 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.593166] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Created directory with path [datastore2] vmware_temp/7352339a-d77e-4900-b952-c24111719e0a/0d8cd420-ac77-4102-b0d0-bf339970e2ce {{(pid=62133) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 514.593373] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Fetch image to [datastore2] vmware_temp/7352339a-d77e-4900-b952-c24111719e0a/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk {{(pid=62133) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 514.593542] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Downloading image file data 0d8cd420-ac77-4102-b0d0-bf339970e2ce to [datastore2] vmware_temp/7352339a-d77e-4900-b952-c24111719e0a/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk on the data store datastore2 {{(pid=62133) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 514.594358] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6470100a-81e6-421e-ad48-25bb9de35382 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.602863] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7c6ed6-7a40-4d90-ab36-9500b7ffbe60 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.612488] env[62133]: DEBUG nova.scheduler.client.report [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 514.616699] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc91510-7bb4-41b6-a422-f2104197d0be {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.651986] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b11a19-a597-4599-a9ef-36d737a7c118 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.658734] env[62133]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-383dcd8c-94f2-4456-9bab-5f87938da9bb {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.747017] env[62133]: DEBUG nova.virt.vmwareapi.images [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Downloading image file data 0d8cd420-ac77-4102-b0d0-bf339970e2ce to the data store datastore2 {{(pid=62133) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 514.817468] env[62133]: DEBUG oslo_vmware.rw_handles [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7352339a-d77e-4900-b952-c24111719e0a/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62133) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 515.122348] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.189s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 515.123062] env[62133]: DEBUG nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Start building networks asynchronously for instance. 
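The recurring "Acquiring lock" / "acquired by" / "released by" lines (the `compute_resources` lock held 2.189s just above, and the `[datastore2] devstack-image-cache_base/...` image-cache lock earlier) come from oslo.concurrency's lockutils. A stripped-down sketch of the two usage forms seen in this log; the lock names mirror the log, but the bodies are placeholders, not Nova's resource tracker or VMware image-cache code.

```python
# Sketch of the locking pattern behind the acquire/release records in this
# log.  Uses oslo.concurrency; the claim logic is a placeholder only.
from oslo_concurrency import lockutils

# Decorator form: calls serialise on the named semaphore, and lockutils
# emits the same "Acquiring/acquired/released" debug lines seen above.
@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid, vcpus, memory_mb):
    # ... check free resources and record the claim ...
    return {'instance': instance_uuid, 'vcpus': vcpus, 'memory_mb': memory_mb}

# Context-manager form, e.g. for a per-image datastore cache lock.
def process_cached_image(image_id):
    with lockutils.lock('[datastore2] devstack-image-cache_base/%s' % image_id):
        # ... fetch the image into the cache if it is missing ...
        pass
```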
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 515.128221] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.724s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.133141] env[62133]: INFO nova.compute.claims [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 515.455378] env[62133]: DEBUG oslo_vmware.rw_handles [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Completed reading data from the image iterator. {{(pid=62133) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 515.455444] env[62133]: DEBUG oslo_vmware.rw_handles [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7352339a-d77e-4900-b952-c24111719e0a/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62133) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 515.593842] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Acquiring lock "b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.597790] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Lock "b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 515.612275] env[62133]: DEBUG nova.virt.vmwareapi.images [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Downloaded image file data 0d8cd420-ac77-4102-b0d0-bf339970e2ce to vmware_temp/7352339a-d77e-4900-b952-c24111719e0a/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk on the data store datastore2 {{(pid=62133) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 515.615910] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Caching image {{(pid=62133) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 515.618129] env[62133]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Copying Virtual Disk [datastore2] vmware_temp/7352339a-d77e-4900-b952-c24111719e0a/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk to [datastore2] vmware_temp/7352339a-d77e-4900-b952-c24111719e0a/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk {{(pid=62133) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 515.618129] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81812e30-2399-49c7-8cf6-f23de2320a28 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 515.628913] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 515.628913] env[62133]: value = "task-835116" [ 515.628913] env[62133]: _type = "Task" [ 515.628913] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 515.640401] env[62133]: DEBUG nova.compute.utils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Using /dev/sd instead of None {{(pid=62133) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 515.641805] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835116, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 515.642736] env[62133]: DEBUG nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Allocating IP information in the background. 
{{(pid=62133) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 515.643030] env[62133]: DEBUG nova.network.neutron [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] allocate_for_instance() {{(pid=62133) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 515.778530] env[62133]: DEBUG nova.policy [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da88a08442694b09b67a0d458c467c81', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4bacec6cd8c24d8ead5c106caca2b7c0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62133) authorize /opt/stack/nova/nova/policy.py:203}} [ 515.843382] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Acquiring lock "0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 515.843382] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Lock "0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.099900] env[62133]: DEBUG nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Starting instance... {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 516.146333] env[62133]: DEBUG nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Start building block device mappings for instance. {{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 516.158892] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835116, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 516.317117] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-386b4af1-7d7c-4180-bcdc-fc585519220b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.329900] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749f8278-4c3c-4e31-bd89-08b79d019840 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.381922] env[62133]: DEBUG nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Starting instance... {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 516.385674] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e94927b-3832-4cde-ab0d-2133832d628c {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.401319] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce9f4ab-6581-4fa9-8b1a-4d5c5814f79e {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.419896] env[62133]: DEBUG nova.compute.provider_tree [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 516.621938] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Acquiring lock "bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.622256] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Lock "bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 516.639894] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.646537] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835116, 'name': CopyVirtualDisk_Task, 'duration_secs': 
0.702854} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 516.647475] env[62133]: DEBUG nova.network.neutron [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Successfully created port: 39252db1-5aa1-4085-b92e-9f2f390e21d0 {{(pid=62133) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 516.649428] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Copied Virtual Disk [datastore2] vmware_temp/7352339a-d77e-4900-b952-c24111719e0a/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk to [datastore2] vmware_temp/7352339a-d77e-4900-b952-c24111719e0a/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk {{(pid=62133) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 516.649617] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Deleting the datastore file [datastore2] vmware_temp/7352339a-d77e-4900-b952-c24111719e0a/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 516.650646] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43c7ee11-7a84-4063-8f69-1111873c2f5b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 516.657246] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 516.657246] env[62133]: value = "task-835117" [ 516.657246] env[62133]: _type = "Task" [ 516.657246] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 516.670800] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835117, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 516.916881] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 516.923210] env[62133]: DEBUG nova.scheduler.client.report [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 517.128606] env[62133]: DEBUG nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Starting instance... {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 517.169178] env[62133]: DEBUG nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Start spawning the instance on the hypervisor. {{(pid=62133) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 517.177375] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835117, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105624} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 517.177375] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Deleted the datastore file {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 517.177375] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Moving file from [datastore2] vmware_temp/7352339a-d77e-4900-b952-c24111719e0a/0d8cd420-ac77-4102-b0d0-bf339970e2ce to [datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce. 
{{(pid=62133) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 517.177375] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-84ef441d-480f-4f06-a317-54c05b202161 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.190018] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 517.190018] env[62133]: value = "task-835118" [ 517.190018] env[62133]: _type = "Task" [ 517.190018] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 517.202826] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835118, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 517.215747] env[62133]: DEBUG nova.virt.hardware [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 517.216043] env[62133]: DEBUG nova.virt.hardware [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 517.216719] env[62133]: DEBUG nova.virt.hardware [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 517.216719] env[62133]: DEBUG nova.virt.hardware [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 517.216719] env[62133]: DEBUG nova.virt.hardware [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} 
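The task-835116 (CopyVirtualDisk_Task), task-835117 (DeleteDatastoreFile_Task) and task-835118 (MoveDatastoreFile_Task) entries above all trace the same invoke-then-poll pattern against vCenter: a task is started through the oslo_vmware session, then polled until it reports completion. The following is only a minimal sketch of that pattern using oslo_vmware's session API, not Nova's actual vm_util code; the helper name and the example datastore paths are placeholders, not values taken from this log.

    # A minimal sketch (not Nova's vm_util implementation) of the
    # invoke-then-poll pattern traced by the task-835116/835117/835118
    # entries above: start a vCenter task, then block until it completes.
    # The helper name and the datastore paths in the usage comment below
    # are placeholders.

    def copy_disk_and_wait(session, dc_ref, source_path, dest_path):
        """Start a CopyVirtualDisk_Task and wait for vCenter to finish it."""
        vim = session.vim
        # invoke_api returns a Task managed-object reference (e.g. "task-835116").
        task_ref = session.invoke_api(
            vim, 'CopyVirtualDisk_Task',
            vim.service_content.virtualDiskManager,
            sourceName=source_path, sourceDatacenter=dc_ref,
            destName=dest_path, destDatacenter=dc_ref)
        # wait_for_task polls the task until it succeeds (the source of the
        # "progress is 0%" / "progress is 100%" DEBUG entries) and raises if
        # vCenter reports the task as failed.
        session.wait_for_task(task_ref)

    # Usage, assuming `session` is an established oslo_vmware
    # api.VMwareAPISession and `dc_ref` a Datacenter managed-object reference:
    #   copy_disk_and_wait(session, dc_ref,
    #                      '[datastore2] vmware_temp/<uuid>/tmp-sparse.vmdk',
    #                      '[datastore2] devstack-image-cache_base/<uuid>.vmdk')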
[ 517.216719] env[62133]: DEBUG nova.virt.hardware [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 517.217107] env[62133]: DEBUG nova.virt.hardware [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 517.217107] env[62133]: DEBUG nova.virt.hardware [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 517.217175] env[62133]: DEBUG nova.virt.hardware [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 517.219792] env[62133]: DEBUG nova.virt.hardware [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 517.219792] env[62133]: DEBUG nova.virt.hardware [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 517.219792] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6f6ef1-576c-4f80-bbf0-d3241143db7f {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.227748] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfab677b-91d6-4fd1-8bb7-25b5e5d86e13 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.429960] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 517.430628] env[62133]: DEBUG nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Start building networks asynchronously for instance. 
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 517.435690] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.796s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.437223] env[62133]: INFO nova.compute.claims [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 517.446817] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Acquiring lock "f8330803-064a-4f31-b769-19b0ce281e07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.446817] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Lock "f8330803-064a-4f31-b769-19b0ce281e07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.663881] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.702771] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835118, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.031973} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 517.703424] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] File moved {{(pid=62133) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 517.703784] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Cleaning up location [datastore2] vmware_temp/7352339a-d77e-4900-b952-c24111719e0a {{(pid=62133) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 517.704077] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Deleting the datastore file [datastore2] vmware_temp/7352339a-d77e-4900-b952-c24111719e0a {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 517.706018] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cbd81266-594c-4f26-b864-83c5ca15dc7d {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 517.714033] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 517.714033] env[62133]: value = "task-835119" [ 517.714033] env[62133]: _type = "Task" [ 517.714033] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 517.731750] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835119, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 517.731750] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Acquiring lock "f774f4eb-7329-4449-8567-b4c0dc8fd78a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 517.731750] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Lock "f774f4eb-7329-4449-8567-b4c0dc8fd78a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 517.943671] env[62133]: DEBUG nova.compute.utils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Using /dev/sd instead of None {{(pid=62133) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 517.945448] env[62133]: DEBUG nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Allocating IP information in the background. {{(pid=62133) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 517.945448] env[62133]: DEBUG nova.network.neutron [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] allocate_for_instance() {{(pid=62133) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 517.952448] env[62133]: DEBUG nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Starting instance... {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 518.192597] env[62133]: DEBUG nova.policy [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '418a27b4885f4cae9a00012e42dabece', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd56b28fa8f7449abcdbe2ecac0f9503', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62133) authorize /opt/stack/nova/nova/policy.py:203}} [ 518.221756] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835119, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026436} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 518.221984] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Deleted the datastore file {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 518.222701] env[62133]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8e26f8a-548b-46ef-bbba-0c50fa5f294c {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.227940] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 518.227940] env[62133]: value = "session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52f50160-56fd-3280-aa6d-75f27c67230c" [ 518.227940] env[62133]: _type = "Task" [ 518.227940] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 518.238021] env[62133]: DEBUG nova.compute.manager [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Starting instance... {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 518.238515] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52f50160-56fd-3280-aa6d-75f27c67230c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 518.453315] env[62133]: DEBUG nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Start building block device mappings for instance. 
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 518.488202] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 518.625128] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da54f488-fe62-4d8a-bcba-71427a8128e7 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.634812] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8440e9-80dc-434a-8bf1-7c498240e264 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.667307] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4506ac-8aee-4f2c-a411-085beca55a9c {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.676289] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5999296c-f135-4398-b38f-908e893d1646 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.684070] env[62133]: ERROR nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 39252db1-5aa1-4085-b92e-9f2f390e21d0, please check neutron logs for more information. 
[ 518.684070] env[62133]: ERROR nova.compute.manager Traceback (most recent call last): [ 518.684070] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 518.684070] env[62133]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 518.684070] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 518.684070] env[62133]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 518.684070] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 518.684070] env[62133]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 518.684070] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 518.684070] env[62133]: ERROR nova.compute.manager self.force_reraise() [ 518.684070] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 518.684070] env[62133]: ERROR nova.compute.manager raise self.value [ 518.684070] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 518.684070] env[62133]: ERROR nova.compute.manager updated_port = self._update_port( [ 518.684070] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 518.684070] env[62133]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 518.684725] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 518.684725] env[62133]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 518.684725] env[62133]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 39252db1-5aa1-4085-b92e-9f2f390e21d0, please check neutron logs for more information. 
[ 518.684725] env[62133]: ERROR nova.compute.manager [ 518.684725] env[62133]: Traceback (most recent call last): [ 518.684725] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 518.684725] env[62133]: listener.cb(fileno) [ 518.684725] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 518.684725] env[62133]: result = function(*args, **kwargs) [ 518.684725] env[62133]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 518.684725] env[62133]: return func(*args, **kwargs) [ 518.684725] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 518.684725] env[62133]: raise e [ 518.684725] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 518.684725] env[62133]: nwinfo = self.network_api.allocate_for_instance( [ 518.684725] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 518.684725] env[62133]: created_port_ids = self._update_ports_for_instance( [ 518.684725] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 518.684725] env[62133]: with excutils.save_and_reraise_exception(): [ 518.684725] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 518.684725] env[62133]: self.force_reraise() [ 518.684725] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 518.684725] env[62133]: raise self.value [ 518.684725] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 518.684725] env[62133]: updated_port = self._update_port( [ 518.684725] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 518.684725] env[62133]: _ensure_no_port_binding_failure(port) [ 518.684725] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 518.684725] env[62133]: raise exception.PortBindingFailed(port_id=port['id']) [ 518.685693] env[62133]: nova.exception.PortBindingFailed: Binding failed for port 39252db1-5aa1-4085-b92e-9f2f390e21d0, please check neutron logs for more information. [ 518.685693] env[62133]: Removing descriptor: 15 [ 518.685693] env[62133]: ERROR nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 39252db1-5aa1-4085-b92e-9f2f390e21d0, please check neutron logs for more information. 
[ 518.685693] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Traceback (most recent call last): [ 518.685693] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 518.685693] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] yield resources [ 518.685693] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 518.685693] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] self.driver.spawn(context, instance, image_meta, [ 518.685693] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 518.685693] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] self._vmops.spawn(context, instance, image_meta, injected_files, [ 518.685693] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 518.685693] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] vm_ref = self.build_virtual_machine(instance, [ 518.687798] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 518.687798] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] vif_infos = vmwarevif.get_vif_info(self._session, [ 518.687798] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 518.687798] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] for vif in network_info: [ 518.687798] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 518.687798] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] return self._sync_wrapper(fn, *args, **kwargs) [ 518.687798] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 518.687798] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] self.wait() [ 518.687798] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 518.687798] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] self[:] = self._gt.wait() [ 518.687798] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 518.687798] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] return self._exit_event.wait() [ 518.687798] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 518.688298] env[62133]: ERROR 
nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] result = hub.switch() [ 518.688298] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 518.688298] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] return self.greenlet.switch() [ 518.688298] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 518.688298] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] result = function(*args, **kwargs) [ 518.688298] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 518.688298] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] return func(*args, **kwargs) [ 518.688298] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 518.688298] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] raise e [ 518.688298] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 518.688298] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] nwinfo = self.network_api.allocate_for_instance( [ 518.688298] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 518.688298] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] created_port_ids = self._update_ports_for_instance( [ 518.688666] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 518.688666] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] with excutils.save_and_reraise_exception(): [ 518.688666] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 518.688666] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] self.force_reraise() [ 518.688666] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 518.688666] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] raise self.value [ 518.688666] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 518.688666] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] updated_port = self._update_port( [ 518.688666] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 518.688666] 
env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] _ensure_no_port_binding_failure(port) [ 518.688666] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 518.688666] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] raise exception.PortBindingFailed(port_id=port['id']) [ 518.688954] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] nova.exception.PortBindingFailed: Binding failed for port 39252db1-5aa1-4085-b92e-9f2f390e21d0, please check neutron logs for more information. [ 518.688954] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] [ 518.688954] env[62133]: INFO nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Terminating instance [ 518.688954] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Acquiring lock "refresh_cache-801672ea-1218-4c0a-9aae-d94ff1344731" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 518.688954] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Acquired lock "refresh_cache-801672ea-1218-4c0a-9aae-d94ff1344731" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 518.688954] env[62133]: DEBUG nova.network.neutron [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 518.700322] env[62133]: DEBUG nova.compute.provider_tree [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 518.742164] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52f50160-56fd-3280-aa6d-75f27c67230c, 'name': SearchDatastore_Task, 'duration_secs': 0.015021} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 518.742649] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 518.743062] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk to [datastore2] 6174840e-10a6-43ed-be9c-76602f91bf42/6174840e-10a6-43ed-be9c-76602f91bf42.vmdk {{(pid=62133) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 518.743507] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6b9d0a00-74bf-4509-8a56-b9e763edaad2 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 518.755930] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 518.755930] env[62133]: value = "task-835120" [ 518.755930] env[62133]: _type = "Task" [ 518.755930] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 518.767589] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835120, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 518.774602] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 519.117299] env[62133]: DEBUG nova.compute.manager [req-7be470ee-dd95-47bd-bd1d-da832dc9bd5c req-cdd38637-ce2f-4175-93e1-94893fbf91d1 service nova] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Received event network-changed-39252db1-5aa1-4085-b92e-9f2f390e21d0 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 519.117299] env[62133]: DEBUG nova.compute.manager [req-7be470ee-dd95-47bd-bd1d-da832dc9bd5c req-cdd38637-ce2f-4175-93e1-94893fbf91d1 service nova] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Refreshing instance network info cache due to event network-changed-39252db1-5aa1-4085-b92e-9f2f390e21d0. 
{{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 519.117299] env[62133]: DEBUG oslo_concurrency.lockutils [req-7be470ee-dd95-47bd-bd1d-da832dc9bd5c req-cdd38637-ce2f-4175-93e1-94893fbf91d1 service nova] Acquiring lock "refresh_cache-801672ea-1218-4c0a-9aae-d94ff1344731" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 519.202496] env[62133]: DEBUG nova.scheduler.client.report [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 519.265729] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835120, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 519.420763] env[62133]: DEBUG nova.network.neutron [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 519.468322] env[62133]: DEBUG nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Start spawning the instance on the hypervisor. 
{{(pid=62133) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 519.510529] env[62133]: DEBUG nova.virt.hardware [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 519.510783] env[62133]: DEBUG nova.virt.hardware [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 519.510916] env[62133]: DEBUG nova.virt.hardware [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 519.511110] env[62133]: DEBUG nova.virt.hardware [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 519.511258] env[62133]: DEBUG nova.virt.hardware [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 519.512253] env[62133]: DEBUG nova.virt.hardware [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 519.512437] env[62133]: DEBUG nova.virt.hardware [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 519.512652] env[62133]: DEBUG nova.virt.hardware [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 519.512783] env[62133]: DEBUG nova.virt.hardware [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 
tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 519.512941] env[62133]: DEBUG nova.virt.hardware [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 519.513164] env[62133]: DEBUG nova.virt.hardware [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 519.514221] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615a7dc4-a8a5-46e2-824e-e37d2b21f998 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.524517] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07c3306-72b3-48e2-ad2c-de1bd6682f49 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.662768] env[62133]: DEBUG nova.network.neutron [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Successfully created port: 739d8700-7fd6-45a3-8c2a-c13a60475dc1 {{(pid=62133) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 519.709894] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 519.709894] env[62133]: DEBUG nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Start building networks asynchronously for instance. 
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 519.717021] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.796s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 519.717021] env[62133]: INFO nova.compute.claims [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 519.764340] env[62133]: DEBUG nova.network.neutron [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 519.771099] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835120, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513461} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 519.772973] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk to [datastore2] 6174840e-10a6-43ed-be9c-76602f91bf42/6174840e-10a6-43ed-be9c-76602f91bf42.vmdk {{(pid=62133) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 519.773124] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Extending root virtual disk to 1048576 {{(pid=62133) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 519.773391] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef42ad41-2575-4768-bc15-bf4168d6b8c0 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 519.781472] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 519.781472] env[62133]: value = "task-835121" [ 519.781472] env[62133]: _type = "Task" [ 519.781472] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 519.796520] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835121, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 520.218041] env[62133]: DEBUG nova.compute.utils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Using /dev/sd instead of None {{(pid=62133) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 520.220849] env[62133]: DEBUG nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Allocating IP information in the background. {{(pid=62133) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 520.220930] env[62133]: DEBUG nova.network.neutron [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] allocate_for_instance() {{(pid=62133) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 520.272170] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Releasing lock "refresh_cache-801672ea-1218-4c0a-9aae-d94ff1344731" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 520.272791] env[62133]: DEBUG nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Start destroying the instance on the hypervisor. 
{{(pid=62133) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 520.272896] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Destroying instance {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 520.273594] env[62133]: DEBUG oslo_concurrency.lockutils [req-7be470ee-dd95-47bd-bd1d-da832dc9bd5c req-cdd38637-ce2f-4175-93e1-94893fbf91d1 service nova] Acquired lock "refresh_cache-801672ea-1218-4c0a-9aae-d94ff1344731" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 520.273776] env[62133]: DEBUG nova.network.neutron [req-7be470ee-dd95-47bd-bd1d-da832dc9bd5c req-cdd38637-ce2f-4175-93e1-94893fbf91d1 service nova] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Refreshing network info cache for port 39252db1-5aa1-4085-b92e-9f2f390e21d0 {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 520.281236] env[62133]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5ae609a-e0e5-400c-80a3-1850994a883f {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.303392] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc931fe7-0337-4944-9812-74d7bb7fc0a5 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.318803] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835121, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07375} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 520.322049] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Extended root virtual disk {{(pid=62133) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 520.323473] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb291eed-090d-472d-8f92-f3e7ce9c7038 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.349931] env[62133]: DEBUG nova.virt.vmwareapi.volumeops [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 6174840e-10a6-43ed-be9c-76602f91bf42/6174840e-10a6-43ed-be9c-76602f91bf42.vmdk or device None with type sparse {{(pid=62133) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 520.365720] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50c4b82b-05fb-4fd4-9de9-bc2b07efe946 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.383959] env[62133]: WARNING nova.virt.vmwareapi.vmops [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 801672ea-1218-4c0a-9aae-d94ff1344731 could not be found. [ 520.383959] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Instance destroyed {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 520.383959] env[62133]: INFO nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Took 0.11 seconds to destroy the instance on the hypervisor. [ 520.387227] env[62133]: DEBUG oslo.service.loopingcall [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 520.387227] env[62133]: DEBUG nova.compute.manager [-] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 520.387227] env[62133]: DEBUG nova.network.neutron [-] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 520.400101] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 520.400101] env[62133]: value = "task-835122" [ 520.400101] env[62133]: _type = "Task" [ 520.400101] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 520.408809] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835122, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 520.455147] env[62133]: DEBUG nova.network.neutron [-] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 520.639654] env[62133]: DEBUG nova.policy [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d23d9ce9461498bb9bd3c8b21563f28', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9150561a78747b7ae63437a3bd0af32', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62133) authorize /opt/stack/nova/nova/policy.py:203}} [ 520.729606] env[62133]: DEBUG nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Start building block device mappings for instance. {{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 520.881686] env[62133]: DEBUG nova.network.neutron [req-7be470ee-dd95-47bd-bd1d-da832dc9bd5c req-cdd38637-ce2f-4175-93e1-94893fbf91d1 service nova] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 520.910012] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835122, 'name': ReconfigVM_Task, 'duration_secs': 0.311482} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 520.911111] env[62133]: DEBUG nova.virt.vmwareapi.volumeops [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 6174840e-10a6-43ed-be9c-76602f91bf42/6174840e-10a6-43ed-be9c-76602f91bf42.vmdk or device None with type sparse {{(pid=62133) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 520.912428] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1288db7d-20d8-4672-9ac8-ef25a1a972e5 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.917023] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-56ad5182-08e0-4304-9271-1e9b5bf89506 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.925566] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063e99ad-a0ba-41c4-bf6f-8aa82c408a6e {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.928906] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 520.928906] env[62133]: value = "task-835123" [ 520.928906] env[62133]: _type = "Task" [ 520.928906] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 520.962451] env[62133]: DEBUG nova.network.neutron [-] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 520.964931] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6af6c75-255f-484d-bf57-d4c0362bbe45 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.971499] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835123, 'name': Rename_Task} progress is 14%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 520.976924] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de51060c-5899-4b4a-aa15-13ea2156866d {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 520.994224] env[62133]: DEBUG nova.compute.provider_tree [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 521.236650] env[62133]: DEBUG nova.network.neutron [req-7be470ee-dd95-47bd-bd1d-da832dc9bd5c req-cdd38637-ce2f-4175-93e1-94893fbf91d1 service nova] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 521.440986] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835123, 'name': Rename_Task, 'duration_secs': 0.145642} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 521.441266] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Powering on the VM {{(pid=62133) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 521.441563] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f56c0ad-aa96-4781-8d8d-34f5cdea2c79 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.450230] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 521.450230] env[62133]: value = "task-835124" [ 521.450230] env[62133]: _type = "Task" [ 521.450230] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 521.462782] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835124, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 521.472589] env[62133]: INFO nova.compute.manager [-] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Took 1.09 seconds to deallocate network for instance. 
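The oslo_concurrency.lockutils DEBUG lines that recur throughout this log ("Acquiring lock ... by ...", "acquired ... :: waited N.NNNs", "released ... :: held N.NNNs") come from the library's named-lock helpers (the lock and inner frames in lockutils.py cited above). A minimal sketch of that usage pattern, assuming only that oslo.concurrency is installed; claim_resources() is a hypothetical stand-in, not Nova code:

```python
# Minimal sketch of the oslo.concurrency locking pattern behind the
# "acquired by ... :: waited" / "released ... :: held" DEBUG lines above.
# claim_resources() is a hypothetical example function, not Nova code.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs only while the named lock is held; the decorator's wrapper is the
    # "inner" frame in lockutils.py that logs wait and hold times.
    return instance_uuid

claim_resources('6174840e-10a6-43ed-be9c-76602f91bf42')

# The same named lock can also be taken explicitly; this form produces the
# "Acquiring lock ..." / "Releasing lock ..." variants (lockutils.py lock()).
with lockutils.lock('compute_resources'):
    pass
```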
[ 521.473593] env[62133]: DEBUG nova.network.neutron [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Successfully created port: f8d4902d-559e-44bc-b68f-8826931b4441 {{(pid=62133) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 521.479930] env[62133]: DEBUG nova.compute.claims [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Aborting claim: {{(pid=62133) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 521.480121] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 521.499312] env[62133]: DEBUG nova.scheduler.client.report [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 521.722106] env[62133]: DEBUG nova.compute.manager [req-d2abea49-d140-41cc-982b-665f4c2bef33 req-c5f7899e-ba63-4966-86dd-49c57151e527 service nova] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Received event network-vif-deleted-39252db1-5aa1-4085-b92e-9f2f390e21d0 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 521.740403] env[62133]: DEBUG oslo_concurrency.lockutils [req-7be470ee-dd95-47bd-bd1d-da832dc9bd5c req-cdd38637-ce2f-4175-93e1-94893fbf91d1 service nova] Releasing lock "refresh_cache-801672ea-1218-4c0a-9aae-d94ff1344731" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 521.750421] env[62133]: DEBUG nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Start spawning the instance on the hypervisor. 
{{(pid=62133) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 521.782104] env[62133]: DEBUG nova.virt.hardware [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 521.782333] env[62133]: DEBUG nova.virt.hardware [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 521.782413] env[62133]: DEBUG nova.virt.hardware [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 521.782562] env[62133]: DEBUG nova.virt.hardware [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 521.782707] env[62133]: DEBUG nova.virt.hardware [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 521.782851] env[62133]: DEBUG nova.virt.hardware [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 521.783204] env[62133]: DEBUG nova.virt.hardware [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 521.783404] env[62133]: DEBUG nova.virt.hardware [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 521.783665] 
env[62133]: DEBUG nova.virt.hardware [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 521.783837] env[62133]: DEBUG nova.virt.hardware [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 521.784012] env[62133]: DEBUG nova.virt.hardware [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 521.785765] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43818a8-efe4-4438-a954-4348887351e0 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.794165] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a90cfd-4189-45f5-8628-ff5e228b012c {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 521.965122] env[62133]: DEBUG oslo_vmware.api [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835124, 'name': PowerOnVM_Task, 'duration_secs': 0.448238} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 521.966535] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Powered on the VM {{(pid=62133) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 521.966657] env[62133]: INFO nova.compute.manager [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Took 10.74 seconds to spawn the instance on the hypervisor. 
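The "Waiting for the task ... progress is N% ... completed successfully" sequences above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) are oslo.vmware's task polling, the wait_for_task/_poll_task calls cited in oslo_vmware/api.py. A sketch of that usage, assuming a reachable vCenter; the endpoint, credentials and the power_on() helper are placeholders rather than Nova's actual driver code:

```python
# Sketch of the oslo.vmware task-polling pattern seen above; the vCenter
# endpoint, credentials and the power_on() helper are placeholders, not the
# Nova vmwareapi driver itself.
from oslo_vmware import api

def power_on(session, vm_ref):
    """Invoke PowerOnVM_Task on a VM reference and block until it finishes."""
    # invoke_api() issues the vSphere call and returns a task reference;
    # wait_for_task() then polls it (the _poll_task loop in the log) until
    # the task succeeds, or raises if it ends in error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)

# Placeholder connection values; a real vCenter is required to run this, and
# vm_ref would be a managed object reference obtained elsewhere.
session = api.VMwareAPISession('vc.example.test', 'svc-user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
```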
[ 521.966918] env[62133]: DEBUG nova.compute.manager [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Checking state {{(pid=62133) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 521.967788] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95bede8-5b79-4abd-969b-b4632221e92a {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 522.011670] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 522.011782] env[62133]: DEBUG nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Start building networks asynchronously for instance. {{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 522.014263] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.351s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 522.018272] env[62133]: INFO nova.compute.claims [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 522.491435] env[62133]: INFO nova.compute.manager [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Took 15.47 seconds to build instance. [ 522.522899] env[62133]: DEBUG nova.compute.utils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Using /dev/sd instead of None {{(pid=62133) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 522.527405] env[62133]: DEBUG nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Allocating IP information in the background. 
{{(pid=62133) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 522.527405] env[62133]: DEBUG nova.network.neutron [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] allocate_for_instance() {{(pid=62133) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 522.733308] env[62133]: DEBUG nova.policy [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4983ff5f7dec4ba6aa2200452d5b7c17', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e40d89d6857b4e978fd79199ecb7a8e1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62133) authorize /opt/stack/nova/nova/policy.py:203}} [ 522.994909] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f768f973-81d4-41e8-aac0-c8fb292e49a3 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Lock "6174840e-10a6-43ed-be9c-76602f91bf42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 16.983s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 523.030407] env[62133]: DEBUG nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Start building block device mappings for instance. 
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 523.226196] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4165694e-5d94-4c06-8f2b-ef4b82517a90 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.234603] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2d6973-c9c4-4f76-b0ac-3364b7988e5f {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.272760] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cc74b9-c521-45f2-8975-21adfb6bfb63 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.281469] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125a47fc-9bed-49c1-85db-a5bf08aec11f {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 523.298217] env[62133]: DEBUG nova.compute.provider_tree [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 523.628817] env[62133]: ERROR nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 739d8700-7fd6-45a3-8c2a-c13a60475dc1, please check neutron logs for more information. 
[ 523.628817] env[62133]: ERROR nova.compute.manager Traceback (most recent call last): [ 523.628817] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 523.628817] env[62133]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 523.628817] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 523.628817] env[62133]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 523.628817] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 523.628817] env[62133]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 523.628817] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.628817] env[62133]: ERROR nova.compute.manager self.force_reraise() [ 523.628817] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.628817] env[62133]: ERROR nova.compute.manager raise self.value [ 523.628817] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 523.628817] env[62133]: ERROR nova.compute.manager updated_port = self._update_port( [ 523.628817] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.628817] env[62133]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 523.629346] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.629346] env[62133]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 523.629346] env[62133]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 739d8700-7fd6-45a3-8c2a-c13a60475dc1, please check neutron logs for more information. 
[ 523.629346] env[62133]: ERROR nova.compute.manager [ 523.629346] env[62133]: Traceback (most recent call last): [ 523.629346] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 523.629346] env[62133]: listener.cb(fileno) [ 523.629346] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 523.629346] env[62133]: result = function(*args, **kwargs) [ 523.629346] env[62133]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 523.629346] env[62133]: return func(*args, **kwargs) [ 523.629346] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 523.629346] env[62133]: raise e [ 523.629346] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 523.629346] env[62133]: nwinfo = self.network_api.allocate_for_instance( [ 523.629346] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 523.629346] env[62133]: created_port_ids = self._update_ports_for_instance( [ 523.629346] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 523.629346] env[62133]: with excutils.save_and_reraise_exception(): [ 523.629346] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.629346] env[62133]: self.force_reraise() [ 523.629346] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.629346] env[62133]: raise self.value [ 523.629346] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 523.629346] env[62133]: updated_port = self._update_port( [ 523.629346] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.629346] env[62133]: _ensure_no_port_binding_failure(port) [ 523.629346] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.629346] env[62133]: raise exception.PortBindingFailed(port_id=port['id']) [ 523.630059] env[62133]: nova.exception.PortBindingFailed: Binding failed for port 739d8700-7fd6-45a3-8c2a-c13a60475dc1, please check neutron logs for more information. [ 523.630059] env[62133]: Removing descriptor: 16 [ 523.630059] env[62133]: ERROR nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 739d8700-7fd6-45a3-8c2a-c13a60475dc1, please check neutron logs for more information. 
[ 523.630059] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] Traceback (most recent call last): [ 523.630059] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 523.630059] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] yield resources [ 523.630059] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 523.630059] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] self.driver.spawn(context, instance, image_meta, [ 523.630059] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 523.630059] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] self._vmops.spawn(context, instance, image_meta, injected_files, [ 523.630059] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 523.630059] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] vm_ref = self.build_virtual_machine(instance, [ 523.630415] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 523.630415] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] vif_infos = vmwarevif.get_vif_info(self._session, [ 523.630415] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 523.630415] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] for vif in network_info: [ 523.630415] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 523.630415] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] return self._sync_wrapper(fn, *args, **kwargs) [ 523.630415] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 523.630415] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] self.wait() [ 523.630415] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 523.630415] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] self[:] = self._gt.wait() [ 523.630415] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 523.630415] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] return self._exit_event.wait() [ 523.630415] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 523.630766] env[62133]: ERROR 
nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] result = hub.switch() [ 523.630766] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 523.630766] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] return self.greenlet.switch() [ 523.630766] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 523.630766] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] result = function(*args, **kwargs) [ 523.630766] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 523.630766] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] return func(*args, **kwargs) [ 523.630766] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 523.630766] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] raise e [ 523.630766] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 523.630766] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] nwinfo = self.network_api.allocate_for_instance( [ 523.630766] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 523.630766] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] created_port_ids = self._update_ports_for_instance( [ 523.631136] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 523.631136] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] with excutils.save_and_reraise_exception(): [ 523.631136] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 523.631136] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] self.force_reraise() [ 523.631136] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 523.631136] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] raise self.value [ 523.631136] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 523.631136] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] updated_port = self._update_port( [ 523.631136] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 523.631136] 
env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] _ensure_no_port_binding_failure(port) [ 523.631136] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 523.631136] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] raise exception.PortBindingFailed(port_id=port['id']) [ 523.631461] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] nova.exception.PortBindingFailed: Binding failed for port 739d8700-7fd6-45a3-8c2a-c13a60475dc1, please check neutron logs for more information. [ 523.631461] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] [ 523.631461] env[62133]: INFO nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Terminating instance [ 523.637330] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Acquiring lock "refresh_cache-1225f605-2c40-4628-aefe-93359b077b89" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 523.637330] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Acquired lock "refresh_cache-1225f605-2c40-4628-aefe-93359b077b89" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 523.637330] env[62133]: DEBUG nova.network.neutron [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 523.804637] env[62133]: DEBUG nova.scheduler.client.report [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 523.889269] env[62133]: INFO nova.compute.manager [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Rebuilding instance [ 524.047236] env[62133]: DEBUG nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Start spawning the instance on the hypervisor. 
{{(pid=62133) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 524.049710] env[62133]: DEBUG nova.compute.manager [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Checking state {{(pid=62133) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 524.051365] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57bf5ac-891f-4ae5-acdc-6dd5c9663773 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.079226] env[62133]: DEBUG nova.virt.hardware [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 524.080019] env[62133]: DEBUG nova.virt.hardware [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 524.080019] env[62133]: DEBUG nova.virt.hardware [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 524.080019] env[62133]: DEBUG nova.virt.hardware [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 524.080019] env[62133]: DEBUG nova.virt.hardware [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 524.088157] env[62133]: DEBUG nova.virt.hardware [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 524.088584] env[62133]: DEBUG nova.virt.hardware [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 524.088656] env[62133]: DEBUG nova.virt.hardware [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 524.088860] env[62133]: DEBUG nova.virt.hardware [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 524.089529] env[62133]: DEBUG nova.virt.hardware [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 524.089529] env[62133]: DEBUG nova.virt.hardware [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 524.090535] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88422957-8abe-44e6-b244-2bbe627e0d73 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.103524] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8430ce3-4fef-452b-993d-302ace7b8651 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.213213] env[62133]: DEBUG nova.network.neutron [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 524.309375] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.295s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 524.310233] env[62133]: DEBUG nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Start building networks asynchronously for instance. 
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 524.314223] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.827s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 524.317690] env[62133]: INFO nova.compute.claims [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 524.430137] env[62133]: DEBUG nova.network.neutron [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 524.478957] env[62133]: DEBUG nova.network.neutron [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Successfully created port: 73123aa0-496f-461c-95b2-f57a8fef14bf {{(pid=62133) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 524.564045] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Powering off the VM {{(pid=62133) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 524.564361] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5cf6e610-2b4a-4ce4-ac8a-8af2bf60753c {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.571708] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 524.571708] env[62133]: value = "task-835125" [ 524.571708] env[62133]: _type = "Task" [ 524.571708] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 524.580977] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835125, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 524.641482] env[62133]: DEBUG nova.compute.manager [req-c5d462b1-36f8-4e5a-ac36-3af0169ef579 req-5b073378-5025-41db-91fb-f6308a06fcf1 service nova] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Received event network-changed-739d8700-7fd6-45a3-8c2a-c13a60475dc1 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 524.641722] env[62133]: DEBUG nova.compute.manager [req-c5d462b1-36f8-4e5a-ac36-3af0169ef579 req-5b073378-5025-41db-91fb-f6308a06fcf1 service nova] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Refreshing instance network info cache due to event network-changed-739d8700-7fd6-45a3-8c2a-c13a60475dc1. {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 524.642181] env[62133]: DEBUG oslo_concurrency.lockutils [req-c5d462b1-36f8-4e5a-ac36-3af0169ef579 req-5b073378-5025-41db-91fb-f6308a06fcf1 service nova] Acquiring lock "refresh_cache-1225f605-2c40-4628-aefe-93359b077b89" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 524.824422] env[62133]: DEBUG nova.compute.utils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Using /dev/sd instead of None {{(pid=62133) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 524.829358] env[62133]: DEBUG nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Allocating IP information in the background. {{(pid=62133) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 524.829519] env[62133]: DEBUG nova.network.neutron [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] allocate_for_instance() {{(pid=62133) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 524.932713] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Releasing lock "refresh_cache-1225f605-2c40-4628-aefe-93359b077b89" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 524.933239] env[62133]: DEBUG nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Start destroying the instance on the hypervisor. 
{{(pid=62133) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 524.933438] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Destroying instance {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 524.933750] env[62133]: DEBUG oslo_concurrency.lockutils [req-c5d462b1-36f8-4e5a-ac36-3af0169ef579 req-5b073378-5025-41db-91fb-f6308a06fcf1 service nova] Acquired lock "refresh_cache-1225f605-2c40-4628-aefe-93359b077b89" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 524.933919] env[62133]: DEBUG nova.network.neutron [req-c5d462b1-36f8-4e5a-ac36-3af0169ef579 req-5b073378-5025-41db-91fb-f6308a06fcf1 service nova] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Refreshing network info cache for port 739d8700-7fd6-45a3-8c2a-c13a60475dc1 {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 524.935017] env[62133]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-40d32c3d-2798-4586-a452-44aea082e51f {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.945536] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0feda3a4-f9e0-4a28-859f-4267c8470d24 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.975401] env[62133]: WARNING nova.virt.vmwareapi.vmops [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1225f605-2c40-4628-aefe-93359b077b89 could not be found. [ 524.977124] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Instance destroyed {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 524.978031] env[62133]: INFO nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Took 0.04 seconds to destroy the instance on the hypervisor. [ 524.978031] env[62133]: DEBUG oslo.service.loopingcall [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 524.978031] env[62133]: DEBUG nova.compute.manager [-] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 524.978254] env[62133]: DEBUG nova.network.neutron [-] [instance: 1225f605-2c40-4628-aefe-93359b077b89] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 525.058252] env[62133]: DEBUG nova.network.neutron [-] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 525.082553] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835125, 'name': PowerOffVM_Task, 'duration_secs': 0.124587} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 525.082847] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Powered off the VM {{(pid=62133) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 525.083092] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Destroying instance {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 525.083960] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6005e0f-b22a-4255-89e0-5c5b1c572aac {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.093082] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Unregistering the VM {{(pid=62133) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 525.093501] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15593d31-2913-46ce-8959-1e20eea98a1c {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.110353] env[62133]: DEBUG oslo_concurrency.lockutils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Acquiring lock "3410b0c4-da88-4146-bb50-9b5a9a5d0d87" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 525.111476] env[62133]: DEBUG oslo_concurrency.lockutils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Lock "3410b0c4-da88-4146-bb50-9b5a9a5d0d87" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 525.123758] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Unregistered the VM {{(pid=62133) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 525.123967] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Deleting contents of the VM from datastore datastore2 {{(pid=62133) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 525.124242] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Deleting the datastore file [datastore2] 6174840e-10a6-43ed-be9c-76602f91bf42 {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 525.124533] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99ca4a7e-2048-441f-8fce-5115d8dab061 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.132640] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 525.132640] env[62133]: value = "task-835127" [ 525.132640] env[62133]: _type = "Task" [ 525.132640] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 525.143316] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835127, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 525.182419] env[62133]: DEBUG nova.policy [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e03f2a7946bd45ae9e7d646879e0acb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b33e15b2dd8d444796d7002b1edaf8fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62133) authorize /opt/stack/nova/nova/policy.py:203}} [ 525.330576] env[62133]: DEBUG nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Start building block device mappings for instance. 
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 525.547690] env[62133]: DEBUG nova.compute.manager [req-99ac4cb4-dcb4-45cf-b0d2-a55065e61233 req-2873ca4d-d519-4d1f-af83-d69415b11b0c service nova] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Received event network-changed-f8d4902d-559e-44bc-b68f-8826931b4441 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 525.547869] env[62133]: DEBUG nova.compute.manager [req-99ac4cb4-dcb4-45cf-b0d2-a55065e61233 req-2873ca4d-d519-4d1f-af83-d69415b11b0c service nova] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Refreshing instance network info cache due to event network-changed-f8d4902d-559e-44bc-b68f-8826931b4441. {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 525.548086] env[62133]: DEBUG oslo_concurrency.lockutils [req-99ac4cb4-dcb4-45cf-b0d2-a55065e61233 req-2873ca4d-d519-4d1f-af83-d69415b11b0c service nova] Acquiring lock "refresh_cache-b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 525.548317] env[62133]: DEBUG oslo_concurrency.lockutils [req-99ac4cb4-dcb4-45cf-b0d2-a55065e61233 req-2873ca4d-d519-4d1f-af83-d69415b11b0c service nova] Acquired lock "refresh_cache-b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 525.548385] env[62133]: DEBUG nova.network.neutron [req-99ac4cb4-dcb4-45cf-b0d2-a55065e61233 req-2873ca4d-d519-4d1f-af83-d69415b11b0c service nova] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Refreshing network info cache for port f8d4902d-559e-44bc-b68f-8826931b4441 {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 525.561158] env[62133]: DEBUG nova.network.neutron [-] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 525.591776] env[62133]: DEBUG nova.network.neutron [req-c5d462b1-36f8-4e5a-ac36-3af0169ef579 req-5b073378-5025-41db-91fb-f6308a06fcf1 service nova] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 525.605698] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d910cfb-b42b-4e43-aad4-c3645acf32bf {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.612961] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a0068e-163e-47b8-ae50-04b908ff81d1 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.616324] env[62133]: DEBUG nova.compute.manager [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Starting instance... 
{{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 525.657600] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc548c8-5e18-432c-a72b-26d23b4a666c {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.664990] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835127, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.229863} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 525.667104] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Deleted the datastore file {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 525.667286] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Deleted contents of the VM from datastore datastore2 {{(pid=62133) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 525.667720] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Instance destroyed {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 525.672454] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e209db34-796d-4124-bdab-b8baf22349ef {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 525.691326] env[62133]: DEBUG nova.compute.provider_tree [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 525.792327] env[62133]: DEBUG nova.network.neutron [req-c5d462b1-36f8-4e5a-ac36-3af0169ef579 req-5b073378-5025-41db-91fb-f6308a06fcf1 service nova] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.018547] env[62133]: ERROR nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f8d4902d-559e-44bc-b68f-8826931b4441, please check neutron logs for more information. 
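Every failed-spawn traceback in this stretch, including the one for port 739d8700 above and the ones for port f8d4902d that follow, funnels through oslo_utils.excutils.save_and_reraise_exception: those are the __exit__ / force_reraise / "raise self.value" frames. Below is a minimal, self-contained sketch of that pattern; the helper names and the cleanup print are illustrative stand-ins, not Nova's code, and only the oslo.utils context manager itself is taken from the log.

    # Sketch of the save_and_reraise_exception pattern seen in the tracebacks.
    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        """Local stand-in for nova.exception.PortBindingFailed."""

    def update_port(port_id):
        # Illustrative failure: pretend Neutron reported a failed binding.
        raise PortBindingFailed(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)

    def update_ports_for_instance(port_ids):
        created = []
        for port_id in port_ids:
            try:
                update_port(port_id)
                created.append(port_id)
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup runs here; on leaving the block force_reraise()
                    # re-raises the saved exception ("raise self.value" in the
                    # tracebacks), so the caller still sees the original error.
                    print("cleaning up after failed port %s" % port_id)
        return created

    if __name__ == "__main__":
        try:
            update_ports_for_instance(["f8d4902d-559e-44bc-b68f-8826931b4441"])
        except PortBindingFailed as exc:
            print("re-raised:", exc)
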
[ 526.018547] env[62133]: ERROR nova.compute.manager Traceback (most recent call last): [ 526.018547] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 526.018547] env[62133]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 526.018547] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 526.018547] env[62133]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 526.018547] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 526.018547] env[62133]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 526.018547] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.018547] env[62133]: ERROR nova.compute.manager self.force_reraise() [ 526.018547] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.018547] env[62133]: ERROR nova.compute.manager raise self.value [ 526.018547] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 526.018547] env[62133]: ERROR nova.compute.manager updated_port = self._update_port( [ 526.018547] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.018547] env[62133]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 526.019087] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.019087] env[62133]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 526.019087] env[62133]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f8d4902d-559e-44bc-b68f-8826931b4441, please check neutron logs for more information. 
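The traceback above ends in _ensure_no_port_binding_failure raising PortBindingFailed(port_id=port['id']). A simplified stand-in for that check is sketched below; it assumes the usual Neutron convention that a failed binding leaves the port's binding:vif_type set to "binding_failed" (that field is an assumption, it does not appear in this log), and the exception class is a local stand-in rather than nova.exception.

    # Simplified stand-in for the check named in the traceback
    # (nova/network/neutron.py, _ensure_no_port_binding_failure).
    VIF_TYPE_BINDING_FAILED = "binding_failed"  # assumed Neutron convention

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Fail fast as soon as Neutron reports the binding could not be made.
        if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port["id"])

    if __name__ == "__main__":
        bad_port = {"id": "739d8700-7fd6-45a3-8c2a-c13a60475dc1",
                    "binding:vif_type": VIF_TYPE_BINDING_FAILED}
        try:
            ensure_no_port_binding_failure(bad_port)
        except PortBindingFailed as exc:
            print(exc)
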
[ 526.019087] env[62133]: ERROR nova.compute.manager [ 526.019087] env[62133]: Traceback (most recent call last): [ 526.019087] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 526.019087] env[62133]: listener.cb(fileno) [ 526.019087] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 526.019087] env[62133]: result = function(*args, **kwargs) [ 526.019087] env[62133]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 526.019087] env[62133]: return func(*args, **kwargs) [ 526.019087] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 526.019087] env[62133]: raise e [ 526.019087] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 526.019087] env[62133]: nwinfo = self.network_api.allocate_for_instance( [ 526.019087] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 526.019087] env[62133]: created_port_ids = self._update_ports_for_instance( [ 526.019087] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 526.019087] env[62133]: with excutils.save_and_reraise_exception(): [ 526.019087] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.019087] env[62133]: self.force_reraise() [ 526.019087] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.019087] env[62133]: raise self.value [ 526.019087] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 526.019087] env[62133]: updated_port = self._update_port( [ 526.019087] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.019087] env[62133]: _ensure_no_port_binding_failure(port) [ 526.019087] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.019087] env[62133]: raise exception.PortBindingFailed(port_id=port['id']) [ 526.019817] env[62133]: nova.exception.PortBindingFailed: Binding failed for port f8d4902d-559e-44bc-b68f-8826931b4441, please check neutron logs for more information. [ 526.019817] env[62133]: Removing descriptor: 15 [ 526.019817] env[62133]: ERROR nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f8d4902d-559e-44bc-b68f-8826931b4441, please check neutron logs for more information. 
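The traceback that follows shows why the failure only surfaces at spawn time: _allocate_network_async runs in a background greenthread, and the VMware spawn path first touches the result when it iterates network_info (model.py __iter__ -> _sync_wrapper -> wait), at which point the greenthread's exception is re-raised. The schematic below reproduces that shape with eventlet directly; the function names are illustrative, not Nova's classes, and only the spawn-then-wait structure is taken from the frames in the log.

    # Schematic: allocate in a background greenthread, surface errors on wait().
    import eventlet

    class PortBindingFailed(Exception):
        pass

    def allocate_network_async(port_id):
        # Stands in for ComputeManager._allocate_network_async; fails the way
        # the log shows, with a port binding error from Neutron.
        raise PortBindingFailed("Binding failed for port %s" % port_id)

    def build_and_run_instance(port_id):
        gt = eventlet.spawn(allocate_network_async, port_id)  # fire and continue
        # ... image/flavor/topology work proceeds here, as in the DEBUG lines ...
        network_info = gt.wait()  # spawn blocks here; the exception re-raises
        return network_info

    if __name__ == "__main__":
        try:
            build_and_run_instance("f8d4902d-559e-44bc-b68f-8826931b4441")
        except PortBindingFailed as exc:
            print("surfaced at wait():", exc)
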
[ 526.019817] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Traceback (most recent call last): [ 526.019817] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 526.019817] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] yield resources [ 526.019817] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 526.019817] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] self.driver.spawn(context, instance, image_meta, [ 526.019817] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 526.019817] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 526.019817] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 526.019817] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] vm_ref = self.build_virtual_machine(instance, [ 526.020153] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 526.020153] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] vif_infos = vmwarevif.get_vif_info(self._session, [ 526.020153] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 526.020153] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] for vif in network_info: [ 526.020153] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 526.020153] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] return self._sync_wrapper(fn, *args, **kwargs) [ 526.020153] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 526.020153] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] self.wait() [ 526.020153] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 526.020153] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] self[:] = self._gt.wait() [ 526.020153] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 526.020153] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] return self._exit_event.wait() [ 526.020153] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 526.020596] env[62133]: ERROR 
nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] result = hub.switch() [ 526.020596] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 526.020596] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] return self.greenlet.switch() [ 526.020596] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 526.020596] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] result = function(*args, **kwargs) [ 526.020596] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 526.020596] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] return func(*args, **kwargs) [ 526.020596] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 526.020596] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] raise e [ 526.020596] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 526.020596] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] nwinfo = self.network_api.allocate_for_instance( [ 526.020596] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 526.020596] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] created_port_ids = self._update_ports_for_instance( [ 526.020928] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 526.020928] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] with excutils.save_and_reraise_exception(): [ 526.020928] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 526.020928] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] self.force_reraise() [ 526.020928] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 526.020928] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] raise self.value [ 526.020928] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 526.020928] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] updated_port = self._update_port( [ 526.020928] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 526.020928] 
env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] _ensure_no_port_binding_failure(port) [ 526.020928] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 526.020928] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] raise exception.PortBindingFailed(port_id=port['id']) [ 526.021283] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] nova.exception.PortBindingFailed: Binding failed for port f8d4902d-559e-44bc-b68f-8826931b4441, please check neutron logs for more information. [ 526.021283] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] [ 526.021283] env[62133]: INFO nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Terminating instance [ 526.022171] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Acquiring lock "refresh_cache-b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 526.069176] env[62133]: INFO nova.compute.manager [-] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Took 1.09 seconds to deallocate network for instance. [ 526.071470] env[62133]: DEBUG nova.compute.claims [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Aborting claim: {{(pid=62133) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 526.071470] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.079524] env[62133]: DEBUG nova.network.neutron [req-99ac4cb4-dcb4-45cf-b0d2-a55065e61233 req-2873ca4d-d519-4d1f-af83-d69415b11b0c service nova] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Instance cache missing network info. 
{{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 526.142345] env[62133]: DEBUG oslo_concurrency.lockutils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 526.154146] env[62133]: DEBUG nova.network.neutron [req-99ac4cb4-dcb4-45cf-b0d2-a55065e61233 req-2873ca4d-d519-4d1f-af83-d69415b11b0c service nova] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 526.198313] env[62133]: DEBUG nova.scheduler.client.report [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 526.294639] env[62133]: DEBUG oslo_concurrency.lockutils [req-c5d462b1-36f8-4e5a-ac36-3af0169ef579 req-5b073378-5025-41db-91fb-f6308a06fcf1 service nova] Releasing lock "refresh_cache-1225f605-2c40-4628-aefe-93359b077b89" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 526.346821] env[62133]: DEBUG nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Start spawning the instance on the hypervisor. 
{{(pid=62133) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 526.388971] env[62133]: DEBUG nova.virt.hardware [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 526.392350] env[62133]: DEBUG nova.virt.hardware [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 526.392530] env[62133]: DEBUG nova.virt.hardware [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 526.393096] env[62133]: DEBUG nova.virt.hardware [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 526.393096] env[62133]: DEBUG nova.virt.hardware [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 526.393096] env[62133]: DEBUG nova.virt.hardware [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 526.393289] env[62133]: DEBUG nova.virt.hardware [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 526.393475] env[62133]: DEBUG nova.virt.hardware [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 526.393629] 
env[62133]: DEBUG nova.virt.hardware [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 526.394024] env[62133]: DEBUG nova.virt.hardware [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 526.394024] env[62133]: DEBUG nova.virt.hardware [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 526.394980] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27619f5f-33b7-487e-b60e-856874d1ad66 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.404415] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e92caa-9842-4282-8dba-23c4cca7e6de {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.657281] env[62133]: DEBUG oslo_concurrency.lockutils [req-99ac4cb4-dcb4-45cf-b0d2-a55065e61233 req-2873ca4d-d519-4d1f-af83-d69415b11b0c service nova] Releasing lock "refresh_cache-b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 526.657562] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Acquired lock "refresh_cache-b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 526.657740] env[62133]: DEBUG nova.network.neutron [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 526.704971] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 526.705146] env[62133]: DEBUG nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Start building networks asynchronously for instance. 
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 526.713446] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.940s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 526.715820] env[62133]: INFO nova.compute.claims [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 526.728990] env[62133]: DEBUG nova.virt.hardware [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 526.729269] env[62133]: DEBUG nova.virt.hardware [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 526.729689] env[62133]: DEBUG nova.virt.hardware [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 526.729689] env[62133]: DEBUG nova.virt.hardware [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 526.729815] env[62133]: DEBUG nova.virt.hardware [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 526.731020] env[62133]: DEBUG nova.virt.hardware [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 526.731020] env[62133]: DEBUG nova.virt.hardware [None 
req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 526.731244] env[62133]: DEBUG nova.virt.hardware [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 526.731563] env[62133]: DEBUG nova.virt.hardware [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 526.731917] env[62133]: DEBUG nova.virt.hardware [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 526.732692] env[62133]: DEBUG nova.virt.hardware [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 526.733679] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb37860-3aba-48b7-8952-ff7bf59818f2 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.739195] env[62133]: DEBUG nova.network.neutron [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Successfully created port: a38e9eb9-5c90-4a44-9cf0-1f5f93036483 {{(pid=62133) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 526.753931] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d67ff5-6cc0-4d50-a984-ae6f0a9884e6 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.780441] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Instance VIF info [] {{(pid=62133) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 526.787120] env[62133]: DEBUG oslo.service.loopingcall [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 526.787872] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Creating VM on the ESX host {{(pid=62133) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 526.788112] env[62133]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2d7b3e60-90ac-4acc-993f-3f073cfba2ad {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 526.807905] env[62133]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 526.807905] env[62133]: value = "task-835128" [ 526.807905] env[62133]: _type = "Task" [ 526.807905] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 526.819823] env[62133]: DEBUG oslo_vmware.api [-] Task: {'id': task-835128, 'name': CreateVM_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.215914] env[62133]: DEBUG nova.network.neutron [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 527.220299] env[62133]: DEBUG nova.compute.utils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Using /dev/sd instead of None {{(pid=62133) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 527.220747] env[62133]: DEBUG nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Allocating IP information in the background. {{(pid=62133) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 527.220747] env[62133]: DEBUG nova.network.neutron [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] allocate_for_instance() {{(pid=62133) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 527.320825] env[62133]: DEBUG oslo_vmware.api [-] Task: {'id': task-835128, 'name': CreateVM_Task, 'duration_secs': 0.303537} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 527.321071] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Created VM on the ESX host {{(pid=62133) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 527.322490] env[62133]: DEBUG oslo_vmware.service [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3ebdfa-f2c8-4e9e-99be-026f3ad9d94b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.328536] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.328707] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquired lock "[datastore1] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.330094] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 527.330094] env[62133]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24f60b56-ca07-4d5d-ba76-b7cbea7a91d1 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.336029] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 527.336029] env[62133]: value = "session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]521079e9-41b0-d136-d01c-5ffeff9bac39" [ 527.336029] env[62133]: _type = "Task" [ 527.336029] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 527.345627] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]521079e9-41b0-d136-d01c-5ffeff9bac39, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.420123] env[62133]: DEBUG nova.network.neutron [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 527.426195] env[62133]: DEBUG nova.policy [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '659bffb6933b4605a85c319065b7bf0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b9afb7e5e93342c3b63360fe74195246', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62133) authorize /opt/stack/nova/nova/policy.py:203}} [ 527.727826] env[62133]: DEBUG nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Start building block device mappings for instance. {{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 527.741633] env[62133]: DEBUG nova.compute.manager [req-b3d98c2d-5ab5-4bd5-8c7a-d40dae4e4ace req-40cb82f4-a1d5-40d5-82b8-ce7f8196bd30 service nova] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Received event network-vif-deleted-739d8700-7fd6-45a3-8c2a-c13a60475dc1 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 527.848083] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Releasing lock "[datastore1] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 527.848361] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Processing image 0d8cd420-ac77-4102-b0d0-bf339970e2ce {{(pid=62133) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 527.848765] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 527.848932] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquired lock "[datastore1] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk" 
{{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 527.849138] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62133) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 527.852296] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41cae9b1-3bf8-4e63-87f9-8ef26ed34724 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.871015] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62133) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 527.871015] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62133) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 527.871549] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7902c290-d295-4c8f-996e-7f17ee6931b6 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.879404] env[62133]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6987b9a2-a669-4546-a1cd-d5a07c1225cb {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.891481] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 527.891481] env[62133]: value = "session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52a5f30d-2180-f532-244a-6acc42bdff91" [ 527.891481] env[62133]: _type = "Task" [ 527.891481] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 527.905617] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52a5f30d-2180-f532-244a-6acc42bdff91, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 527.913327] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3351ddf0-ca02-4288-89cd-3653f5e51235 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.926510] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6f51f38-9cb1-481b-a198-26ce74f41375 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.933790] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Releasing lock "refresh_cache-b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 527.934190] env[62133]: DEBUG nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Start destroying the instance on the hypervisor. {{(pid=62133) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 527.934368] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Destroying instance {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 527.934631] env[62133]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82bc5787-a64f-4272-ab85-4f44ceaf78bf {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.972491] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98ddcc0-bcdb-4d4a-bc9c-fbc7dd805874 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.984200] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4eb244-a9f0-4cb8-b6a7-f642702872a3 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.995929] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8e6a7a-229a-4990-bc46-107e97d8b5e4 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.004894] env[62133]: WARNING nova.virt.vmwareapi.vmops [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12 could not be found. 
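The Folder.CreateVM_Task and HostDatastoreBrowser.SearchDatastore_Task entries above follow the task-polling pattern that recurs throughout this log: a vSphere task is invoked, then oslo_vmware's wait_for_task (api.py:397) repeatedly polls it (_poll_task, api.py:434) until the task reports completion ("progress is 0%." ... "completed successfully."). The following is a minimal, self-contained sketch of that poll loop; FakeTask, POLL_INTERVAL and the wait_for_task helper here are hypothetical stand-ins for illustration, not the oslo.vmware implementation.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; hypothetical value, not the real config option

    class FakeTask:
        """Hypothetical stand-in for a vSphere task reference such as 'task-835128' above."""
        def __init__(self, total_polls):
            self._polls_left = total_polls

        def poll(self):
            # A real client would query vCenter here; this fake just counts down.
            self._polls_left -= 1
            if self._polls_left <= 0:
                return "success", 100
            return "running", 0

    def wait_for_task(task):
        """Block until the task completes, mirroring the progress entries in the log."""
        while True:
            state, progress = task.poll()
            print(f"Task progress is {progress}%.")
            if state == "success":
                print("Task completed successfully.")
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(POLL_INTERVAL)

    if __name__ == "__main__":
        wait_for_task(FakeTask(total_polls=3))
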
[ 528.005102] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Instance destroyed {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 528.005375] env[62133]: INFO nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Took 0.07 seconds to destroy the instance on the hypervisor. [ 528.005742] env[62133]: DEBUG oslo.service.loopingcall [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 528.006517] env[62133]: DEBUG nova.compute.manager [-] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 528.006723] env[62133]: DEBUG nova.network.neutron [-] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 528.017646] env[62133]: DEBUG nova.compute.provider_tree [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 528.052608] env[62133]: DEBUG nova.network.neutron [-] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Instance cache missing network info. 
{{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 528.376292] env[62133]: DEBUG nova.network.neutron [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Successfully created port: 34081212-a7c2-42e3-b868-a211575a9557 {{(pid=62133) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 528.402716] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Preparing fetch location {{(pid=62133) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 528.402979] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Creating directory with path [datastore1] vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104/0d8cd420-ac77-4102-b0d0-bf339970e2ce {{(pid=62133) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 528.403223] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1322f2e3-2206-463a-8d23-3435bb52b8d3 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.428279] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Created directory with path [datastore1] vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104/0d8cd420-ac77-4102-b0d0-bf339970e2ce {{(pid=62133) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 528.428541] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Fetch image to [datastore1] vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk {{(pid=62133) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 528.428769] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Downloading image file data 0d8cd420-ac77-4102-b0d0-bf339970e2ce to [datastore1] vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk on the data store datastore1 {{(pid=62133) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 528.429516] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b7f22c-1849-425c-bded-94e8980de0a6 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.438659] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03872410-4d2f-4488-9089-8cf4efef662d {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.449782] env[62133]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9286335f-d183-492a-8c63-6a1062a236e9 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.484264] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610492db-4c52-4da7-ae4d-3050ae48269e {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.490113] env[62133]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-55da4a7a-54d4-4d6c-bd80-6030e012a5e5 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.522301] env[62133]: DEBUG nova.scheduler.client.report [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 528.555388] env[62133]: DEBUG nova.network.neutron [-] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 528.581303] env[62133]: DEBUG nova.virt.vmwareapi.images [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Downloading image file data 0d8cd420-ac77-4102-b0d0-bf339970e2ce to the data store datastore1 {{(pid=62133) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 528.745754] env[62133]: DEBUG nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Start spawning the instance on the hypervisor. {{(pid=62133) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 528.756777] env[62133]: DEBUG oslo_vmware.rw_handles [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62133) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 528.827034] env[62133]: DEBUG nova.virt.hardware [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 528.828146] env[62133]: DEBUG nova.virt.hardware [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 528.828146] env[62133]: DEBUG nova.virt.hardware [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 528.828146] env[62133]: DEBUG nova.virt.hardware [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 528.828146] env[62133]: DEBUG nova.virt.hardware [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 528.828146] env[62133]: DEBUG nova.virt.hardware [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 528.828394] env[62133]: DEBUG nova.virt.hardware [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 528.828394] env[62133]: DEBUG nova.virt.hardware [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
528.828517] env[62133]: DEBUG nova.virt.hardware [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 528.828673] env[62133]: DEBUG nova.virt.hardware [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 528.829326] env[62133]: DEBUG nova.virt.hardware [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 528.829722] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ded842-43e2-487c-845a-a5463aad5b63 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 528.842876] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7ffe2a-d49f-44ac-98cc-66feb25653eb {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.015642] env[62133]: ERROR nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 73123aa0-496f-461c-95b2-f57a8fef14bf, please check neutron logs for more information. 
[ 529.015642] env[62133]: ERROR nova.compute.manager Traceback (most recent call last): [ 529.015642] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.015642] env[62133]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 529.015642] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 529.015642] env[62133]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 529.015642] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 529.015642] env[62133]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 529.015642] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.015642] env[62133]: ERROR nova.compute.manager self.force_reraise() [ 529.015642] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.015642] env[62133]: ERROR nova.compute.manager raise self.value [ 529.015642] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 529.015642] env[62133]: ERROR nova.compute.manager updated_port = self._update_port( [ 529.015642] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.015642] env[62133]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 529.016378] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.016378] env[62133]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 529.016378] env[62133]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 73123aa0-496f-461c-95b2-f57a8fef14bf, please check neutron logs for more information. 
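The traceback above ends in _ensure_no_port_binding_failure raising PortBindingFailed after Neutron returned the port with a failed binding, with oslo_utils.excutils.save_and_reraise_exception() in the intermediate frames preserving the original exception while cleanup runs. Below is a simplified, hypothetical reconstruction of that final check; the 'binding:vif_type' == 'binding_failed' test and the PortBindingFailed class here are illustrative stand-ins under that assumption, not Nova's exact code.

    class PortBindingFailed(Exception):
        """Illustrative stand-in for nova.exception.PortBindingFailed."""
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, please check neutron logs "
                "for more information.")

    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reported that it could not bind the port.

        Assumption: a failed binding is signalled by the port's
        'binding:vif_type' attribute being set to 'binding_failed'.
        """
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port["id"])

    if __name__ == "__main__":
        port = {"id": "73123aa0-496f-461c-95b2-f57a8fef14bf",
                "binding:vif_type": "binding_failed"}
        try:
            ensure_no_port_binding_failure(port)
        except PortBindingFailed as exc:
            print(f"ERROR: {exc}")
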
[ 529.016378] env[62133]: ERROR nova.compute.manager [ 529.016378] env[62133]: Traceback (most recent call last): [ 529.016378] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 529.016378] env[62133]: listener.cb(fileno) [ 529.016378] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.016378] env[62133]: result = function(*args, **kwargs) [ 529.016378] env[62133]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 529.016378] env[62133]: return func(*args, **kwargs) [ 529.016378] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 529.016378] env[62133]: raise e [ 529.016378] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.016378] env[62133]: nwinfo = self.network_api.allocate_for_instance( [ 529.016378] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 529.016378] env[62133]: created_port_ids = self._update_ports_for_instance( [ 529.016378] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 529.016378] env[62133]: with excutils.save_and_reraise_exception(): [ 529.016378] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.016378] env[62133]: self.force_reraise() [ 529.016378] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.016378] env[62133]: raise self.value [ 529.016378] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 529.016378] env[62133]: updated_port = self._update_port( [ 529.016378] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.016378] env[62133]: _ensure_no_port_binding_failure(port) [ 529.016378] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.016378] env[62133]: raise exception.PortBindingFailed(port_id=port['id']) [ 529.018998] env[62133]: nova.exception.PortBindingFailed: Binding failed for port 73123aa0-496f-461c-95b2-f57a8fef14bf, please check neutron logs for more information. [ 529.018998] env[62133]: Removing descriptor: 18 [ 529.018998] env[62133]: ERROR nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 73123aa0-496f-461c-95b2-f57a8fef14bf, please check neutron logs for more information. 
[ 529.018998] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Traceback (most recent call last): [ 529.018998] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 529.018998] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] yield resources [ 529.018998] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 529.018998] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] self.driver.spawn(context, instance, image_meta, [ 529.018998] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 529.018998] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] self._vmops.spawn(context, instance, image_meta, injected_files, [ 529.018998] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 529.018998] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] vm_ref = self.build_virtual_machine(instance, [ 529.019516] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 529.019516] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] vif_infos = vmwarevif.get_vif_info(self._session, [ 529.019516] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 529.019516] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] for vif in network_info: [ 529.019516] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 529.019516] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] return self._sync_wrapper(fn, *args, **kwargs) [ 529.019516] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 529.019516] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] self.wait() [ 529.019516] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 529.019516] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] self[:] = self._gt.wait() [ 529.019516] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 529.019516] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] return self._exit_event.wait() [ 529.019516] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 529.020037] env[62133]: ERROR 
nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] result = hub.switch() [ 529.020037] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 529.020037] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] return self.greenlet.switch() [ 529.020037] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 529.020037] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] result = function(*args, **kwargs) [ 529.020037] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 529.020037] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] return func(*args, **kwargs) [ 529.020037] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 529.020037] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] raise e [ 529.020037] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 529.020037] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] nwinfo = self.network_api.allocate_for_instance( [ 529.020037] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 529.020037] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] created_port_ids = self._update_ports_for_instance( [ 529.020485] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 529.020485] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] with excutils.save_and_reraise_exception(): [ 529.020485] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 529.020485] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] self.force_reraise() [ 529.020485] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 529.020485] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] raise self.value [ 529.020485] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 529.020485] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] updated_port = self._update_port( [ 529.020485] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 529.020485] 
env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] _ensure_no_port_binding_failure(port) [ 529.020485] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 529.020485] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] raise exception.PortBindingFailed(port_id=port['id']) [ 529.020966] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] nova.exception.PortBindingFailed: Binding failed for port 73123aa0-496f-461c-95b2-f57a8fef14bf, please check neutron logs for more information. [ 529.020966] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] [ 529.020966] env[62133]: INFO nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Terminating instance [ 529.020966] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Acquiring lock "refresh_cache-0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.021440] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Acquired lock "refresh_cache-0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.021941] env[62133]: DEBUG nova.network.neutron [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 529.031853] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 529.031853] env[62133]: DEBUG nova.compute.manager [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Start building networks asynchronously for instance. 
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 529.034384] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 7.554s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.063265] env[62133]: INFO nova.compute.manager [-] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Took 1.06 seconds to deallocate network for instance. [ 529.066616] env[62133]: DEBUG nova.compute.claims [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Aborting claim: {{(pid=62133) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 529.066616] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.124463] env[62133]: DEBUG oslo_service.periodic_task [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62133) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 529.126524] env[62133]: DEBUG oslo_service.periodic_task [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62133) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 529.126755] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Starting heal instance info cache {{(pid=62133) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9926}} [ 529.126893] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Rebuilding the list of instances to heal {{(pid=62133) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9930}} [ 529.199534] env[62133]: DEBUG nova.compute.manager [req-dca66502-cc0a-4f3b-8176-6c5538dec22a req-635c53c9-2e3f-4d38-907d-de34aa6a7e07 service nova] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Received event network-vif-deleted-f8d4902d-559e-44bc-b68f-8826931b4441 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 529.461333] env[62133]: DEBUG oslo_vmware.rw_handles [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Completed reading data from the image iterator. 
{{(pid=62133) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 529.461333] env[62133]: DEBUG oslo_vmware.rw_handles [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62133) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 529.540567] env[62133]: DEBUG nova.compute.utils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Using /dev/sd instead of None {{(pid=62133) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 529.549410] env[62133]: DEBUG nova.compute.manager [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Allocating IP information in the background. {{(pid=62133) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 529.549410] env[62133]: DEBUG nova.network.neutron [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] allocate_for_instance() {{(pid=62133) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 529.568066] env[62133]: DEBUG nova.network.neutron [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Instance cache missing network info. 
{{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 529.599630] env[62133]: DEBUG nova.virt.vmwareapi.images [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Downloaded image file data 0d8cd420-ac77-4102-b0d0-bf339970e2ce to vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk on the data store datastore1 {{(pid=62133) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 529.601572] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Caching image {{(pid=62133) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 529.601572] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Copying Virtual Disk [datastore1] vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk to [datastore1] vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk {{(pid=62133) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 529.602390] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce27b291-84c6-4967-88f1-ad0418201d06 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.618141] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 529.618141] env[62133]: value = "task-835129" [ 529.618141] env[62133]: _type = "Task" [ 529.618141] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 529.635780] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Skipping network cache update for instance because it is Building. {{(pid=62133) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 529.637986] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Skipping network cache update for instance because it is Building. {{(pid=62133) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 529.637986] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Skipping network cache update for instance because it is Building. {{(pid=62133) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 529.637986] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Skipping network cache update for instance because it is Building. 
{{(pid=62133) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 529.637986] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Skipping network cache update for instance because it is Building. {{(pid=62133) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 529.637986] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Skipping network cache update for instance because it is Building. {{(pid=62133) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9939}} [ 529.637986] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Acquiring lock "refresh_cache-6174840e-10a6-43ed-be9c-76602f91bf42" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 529.638587] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Acquired lock "refresh_cache-6174840e-10a6-43ed-be9c-76602f91bf42" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 529.638587] env[62133]: DEBUG nova.network.neutron [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Forcefully refreshing network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 529.638587] env[62133]: DEBUG nova.objects.instance [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Lazy-loading 'info_cache' on Instance uuid 6174840e-10a6-43ed-be9c-76602f91bf42 {{(pid=62133) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 529.649403] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835129, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 529.754713] env[62133]: DEBUG nova.policy [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '24e43c40b81f450da496e0a7f54c6ca7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '55c07171d88b4c0bad8a6488b397813a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62133) authorize /opt/stack/nova/nova/policy.py:203}} [ 529.787826] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7237aca7-832e-4cce-a951-5ccafde6da91 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.794443] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a26436e-b14b-4fa4-82f9-a51bcae77276 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.825760] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ee7386-e14f-4854-b261-2e333d2882f8 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.842175] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396e9580-3bc8-49b9-a415-eda3546e10fa {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.863870] env[62133]: DEBUG nova.compute.provider_tree [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 529.887186] env[62133]: DEBUG oslo_concurrency.lockutils [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Acquiring lock "584b1af2-6903-4a08-8ed2-9cd21075f0de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.887491] env[62133]: DEBUG oslo_concurrency.lockutils [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Lock "584b1af2-6903-4a08-8ed2-9cd21075f0de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.895187] env[62133]: DEBUG oslo_concurrency.lockutils [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Acquiring lock 
"c2c48d98-b791-42c9-8038-d969dda7573e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 529.895267] env[62133]: DEBUG oslo_concurrency.lockutils [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Lock "c2c48d98-b791-42c9-8038-d969dda7573e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 529.990154] env[62133]: DEBUG nova.network.neutron [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 530.051107] env[62133]: DEBUG nova.compute.manager [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Start building block device mappings for instance. {{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 530.135018] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835129, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.368223] env[62133]: DEBUG nova.scheduler.client.report [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 530.391173] env[62133]: DEBUG nova.compute.manager [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Starting instance... {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 530.398068] env[62133]: DEBUG nova.compute.manager [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Starting instance... 
{{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 530.492911] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Releasing lock "refresh_cache-0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 530.495762] env[62133]: DEBUG nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Start destroying the instance on the hypervisor. {{(pid=62133) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 530.495762] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Destroying instance {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 530.495762] env[62133]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9bd09b5-e482-4cf5-827f-ef03eba9968a {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.506449] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3bd0d9c-7719-488b-9a76-805c3f9ba5f1 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.532848] env[62133]: WARNING nova.virt.vmwareapi.vmops [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30 could not be found. [ 530.533206] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Instance destroyed {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 530.533317] env[62133]: INFO nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Took 0.04 seconds to destroy the instance on the hypervisor. [ 530.534922] env[62133]: DEBUG oslo.service.loopingcall [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 530.535277] env[62133]: DEBUG nova.compute.manager [-] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 530.535277] env[62133]: DEBUG nova.network.neutron [-] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 530.600842] env[62133]: DEBUG nova.network.neutron [-] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 530.631192] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835129, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.686426} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 530.631640] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Copied Virtual Disk [datastore1] vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk to [datastore1] vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk {{(pid=62133) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 530.631795] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Deleting the datastore file [datastore1] vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104/0d8cd420-ac77-4102-b0d0-bf339970e2ce/tmp-sparse.vmdk {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 530.632050] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e58bab4-5398-4a3c-9a81-2b4feae9056c {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 530.638846] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 530.638846] env[62133]: value = "task-835130" [ 530.638846] env[62133]: _type = "Task" [ 530.638846] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 530.651398] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835130, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 530.697169] env[62133]: DEBUG nova.network.neutron [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Instance cache missing network info. 
{{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 530.877612] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.841s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 530.877612] env[62133]: ERROR nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 39252db1-5aa1-4085-b92e-9f2f390e21d0, please check neutron logs for more information. [ 530.877612] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Traceback (most recent call last): [ 530.877612] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 530.877612] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] self.driver.spawn(context, instance, image_meta, [ 530.877612] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 530.877612] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] self._vmops.spawn(context, instance, image_meta, injected_files, [ 530.877612] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 530.877612] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] vm_ref = self.build_virtual_machine(instance, [ 530.877920] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 530.877920] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] vif_infos = vmwarevif.get_vif_info(self._session, [ 530.877920] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 530.877920] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] for vif in network_info: [ 530.877920] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 530.877920] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] return self._sync_wrapper(fn, *args, **kwargs) [ 530.877920] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 530.877920] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] self.wait() [ 530.877920] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 
530.877920] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] self[:] = self._gt.wait() [ 530.877920] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 530.877920] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] return self._exit_event.wait() [ 530.877920] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 530.878269] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] result = hub.switch() [ 530.878269] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 530.878269] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] return self.greenlet.switch() [ 530.878269] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 530.878269] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] result = function(*args, **kwargs) [ 530.878269] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 530.878269] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] return func(*args, **kwargs) [ 530.878269] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 530.878269] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] raise e [ 530.878269] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 530.878269] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] nwinfo = self.network_api.allocate_for_instance( [ 530.878269] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 530.878269] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] created_port_ids = self._update_ports_for_instance( [ 530.878715] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 530.878715] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] with excutils.save_and_reraise_exception(): [ 530.878715] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.878715] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] self.force_reraise() [ 530.878715] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.878715] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] raise self.value [ 530.878715] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 530.878715] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] updated_port = self._update_port( [ 530.878715] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.878715] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] _ensure_no_port_binding_failure(port) [ 530.878715] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.878715] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] raise exception.PortBindingFailed(port_id=port['id']) [ 530.879926] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] nova.exception.PortBindingFailed: Binding failed for port 39252db1-5aa1-4085-b92e-9f2f390e21d0, please check neutron logs for more information. [ 530.879926] env[62133]: ERROR nova.compute.manager [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] [ 530.879926] env[62133]: DEBUG nova.compute.utils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Binding failed for port 39252db1-5aa1-4085-b92e-9f2f390e21d0, please check neutron logs for more information. {{(pid=62133) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 530.879926] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 4.807s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 530.884566] env[62133]: ERROR nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a38e9eb9-5c90-4a44-9cf0-1f5f93036483, please check neutron logs for more information. 
[ 530.884566] env[62133]: ERROR nova.compute.manager Traceback (most recent call last): [ 530.884566] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 530.884566] env[62133]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 530.884566] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 530.884566] env[62133]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 530.884566] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 530.884566] env[62133]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 530.884566] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.884566] env[62133]: ERROR nova.compute.manager self.force_reraise() [ 530.884566] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.884566] env[62133]: ERROR nova.compute.manager raise self.value [ 530.884566] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 530.884566] env[62133]: ERROR nova.compute.manager updated_port = self._update_port( [ 530.884566] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.884566] env[62133]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 530.885061] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.885061] env[62133]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 530.885061] env[62133]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a38e9eb9-5c90-4a44-9cf0-1f5f93036483, please check neutron logs for more information. 
[ 530.885061] env[62133]: ERROR nova.compute.manager [ 530.885061] env[62133]: Traceback (most recent call last): [ 530.885061] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 530.885061] env[62133]: listener.cb(fileno) [ 530.885061] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 530.885061] env[62133]: result = function(*args, **kwargs) [ 530.885061] env[62133]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 530.885061] env[62133]: return func(*args, **kwargs) [ 530.885061] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 530.885061] env[62133]: raise e [ 530.885061] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 530.885061] env[62133]: nwinfo = self.network_api.allocate_for_instance( [ 530.885061] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 530.885061] env[62133]: created_port_ids = self._update_ports_for_instance( [ 530.885061] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 530.885061] env[62133]: with excutils.save_and_reraise_exception(): [ 530.885061] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.885061] env[62133]: self.force_reraise() [ 530.885061] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.885061] env[62133]: raise self.value [ 530.885061] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 530.885061] env[62133]: updated_port = self._update_port( [ 530.885061] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.885061] env[62133]: _ensure_no_port_binding_failure(port) [ 530.885061] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.885061] env[62133]: raise exception.PortBindingFailed(port_id=port['id']) [ 530.886531] env[62133]: nova.exception.PortBindingFailed: Binding failed for port a38e9eb9-5c90-4a44-9cf0-1f5f93036483, please check neutron logs for more information. [ 530.886531] env[62133]: Removing descriptor: 16 [ 530.886531] env[62133]: ERROR nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a38e9eb9-5c90-4a44-9cf0-1f5f93036483, please check neutron logs for more information. 
[ 530.886531] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Traceback (most recent call last): [ 530.886531] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 530.886531] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] yield resources [ 530.886531] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 530.886531] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] self.driver.spawn(context, instance, image_meta, [ 530.886531] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 530.886531] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 530.886531] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 530.886531] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] vm_ref = self.build_virtual_machine(instance, [ 530.889682] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 530.889682] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] vif_infos = vmwarevif.get_vif_info(self._session, [ 530.889682] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 530.889682] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] for vif in network_info: [ 530.889682] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 530.889682] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] return self._sync_wrapper(fn, *args, **kwargs) [ 530.889682] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 530.889682] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] self.wait() [ 530.889682] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 530.889682] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] self[:] = self._gt.wait() [ 530.889682] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 530.889682] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] return self._exit_event.wait() [ 530.889682] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 530.890128] env[62133]: ERROR 
nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] result = hub.switch() [ 530.890128] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 530.890128] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] return self.greenlet.switch() [ 530.890128] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 530.890128] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] result = function(*args, **kwargs) [ 530.890128] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 530.890128] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] return func(*args, **kwargs) [ 530.890128] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 530.890128] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] raise e [ 530.890128] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 530.890128] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] nwinfo = self.network_api.allocate_for_instance( [ 530.890128] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 530.890128] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] created_port_ids = self._update_ports_for_instance( [ 530.890450] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 530.890450] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] with excutils.save_and_reraise_exception(): [ 530.890450] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 530.890450] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] self.force_reraise() [ 530.890450] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 530.890450] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] raise self.value [ 530.890450] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 530.890450] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] updated_port = self._update_port( [ 530.890450] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 530.890450] 
env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] _ensure_no_port_binding_failure(port) [ 530.890450] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 530.890450] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] raise exception.PortBindingFailed(port_id=port['id']) [ 530.890783] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] nova.exception.PortBindingFailed: Binding failed for port a38e9eb9-5c90-4a44-9cf0-1f5f93036483, please check neutron logs for more information. [ 530.890783] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] [ 530.890783] env[62133]: INFO nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Terminating instance [ 530.903672] env[62133]: DEBUG nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Build of instance 801672ea-1218-4c0a-9aae-d94ff1344731 was re-scheduled: Binding failed for port 39252db1-5aa1-4085-b92e-9f2f390e21d0, please check neutron logs for more information. {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 530.904172] env[62133]: DEBUG nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Unplugging VIFs for instance {{(pid=62133) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 530.904411] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Acquiring lock "refresh_cache-801672ea-1218-4c0a-9aae-d94ff1344731" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 530.905762] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Acquired lock "refresh_cache-801672ea-1218-4c0a-9aae-d94ff1344731" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 530.905762] env[62133]: DEBUG nova.network.neutron [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 530.923295] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Acquiring lock "refresh_cache-bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" {{(pid=62133) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 530.923295] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Acquired lock "refresh_cache-bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 530.923295] env[62133]: DEBUG nova.network.neutron [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 530.940066] env[62133]: DEBUG oslo_concurrency.lockutils [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 530.948121] env[62133]: DEBUG oslo_concurrency.lockutils [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.062398] env[62133]: DEBUG nova.compute.manager [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Start spawning the instance on the hypervisor. {{(pid=62133) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 531.104768] env[62133]: DEBUG nova.compute.manager [req-103455e9-7b60-4949-b2a8-6b93cfe9ff98 req-ebe609cb-7d12-476d-916b-f555121bc477 service nova] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Received event network-changed-73123aa0-496f-461c-95b2-f57a8fef14bf {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 531.104768] env[62133]: DEBUG nova.compute.manager [req-103455e9-7b60-4949-b2a8-6b93cfe9ff98 req-ebe609cb-7d12-476d-916b-f555121bc477 service nova] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Refreshing instance network info cache due to event network-changed-73123aa0-496f-461c-95b2-f57a8fef14bf. 
{{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 531.104768] env[62133]: DEBUG oslo_concurrency.lockutils [req-103455e9-7b60-4949-b2a8-6b93cfe9ff98 req-ebe609cb-7d12-476d-916b-f555121bc477 service nova] Acquiring lock "refresh_cache-0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 531.104768] env[62133]: DEBUG oslo_concurrency.lockutils [req-103455e9-7b60-4949-b2a8-6b93cfe9ff98 req-ebe609cb-7d12-476d-916b-f555121bc477 service nova] Acquired lock "refresh_cache-0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 531.104768] env[62133]: DEBUG nova.network.neutron [req-103455e9-7b60-4949-b2a8-6b93cfe9ff98 req-ebe609cb-7d12-476d-916b-f555121bc477 service nova] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Refreshing network info cache for port 73123aa0-496f-461c-95b2-f57a8fef14bf {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 531.105240] env[62133]: DEBUG nova.virt.hardware [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 531.105240] env[62133]: DEBUG nova.virt.hardware [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 531.105240] env[62133]: DEBUG nova.virt.hardware [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 531.105402] env[62133]: DEBUG nova.virt.hardware [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 531.105402] env[62133]: DEBUG nova.virt.hardware [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 531.105608] env[62133]: DEBUG nova.virt.hardware [None 
req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 531.105794] env[62133]: DEBUG nova.virt.hardware [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 531.106326] env[62133]: DEBUG nova.virt.hardware [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 531.106326] env[62133]: DEBUG nova.virt.hardware [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 531.106326] env[62133]: DEBUG nova.virt.hardware [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 531.106517] env[62133]: DEBUG nova.virt.hardware [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 531.107101] env[62133]: DEBUG nova.network.neutron [-] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.109994] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4c701f-90c1-457c-b31f-fdeda02087bd {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.123455] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa551060-7647-4366-8c5f-be1189b7fcdb {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.154975] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835130, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.027118} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.155243] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Deleted the datastore file {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 531.155454] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Moving file from [datastore1] vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104/0d8cd420-ac77-4102-b0d0-bf339970e2ce to [datastore1] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce. {{(pid=62133) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 531.155702] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-ac48c805-6f61-4001-b678-babb3eff90dc {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.159347] env[62133]: DEBUG nova.network.neutron [req-103455e9-7b60-4949-b2a8-6b93cfe9ff98 req-ebe609cb-7d12-476d-916b-f555121bc477 service nova] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 531.165804] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 531.165804] env[62133]: value = "task-835131" [ 531.165804] env[62133]: _type = "Task" [ 531.165804] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.175659] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835131, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.387898] env[62133]: DEBUG nova.network.neutron [req-103455e9-7b60-4949-b2a8-6b93cfe9ff98 req-ebe609cb-7d12-476d-916b-f555121bc477 service nova] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.457800] env[62133]: DEBUG nova.network.neutron [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 531.483988] env[62133]: DEBUG nova.network.neutron [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Instance cache missing network info. 
{{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 531.496808] env[62133]: DEBUG nova.network.neutron [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 531.550783] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328e67c7-a374-482f-9bc1-7ddf87a5ace0 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.558727] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81819c1-f460-4bd6-94e0-b4f80849a16f {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.591155] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23caaea4-ff09-4d9c-88f9-f246e507904b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.598845] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d66b752-55fe-4bd4-967d-594bec0b0e9d {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.613300] env[62133]: INFO nova.compute.manager [-] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Took 1.08 seconds to deallocate network for instance. [ 531.613914] env[62133]: DEBUG nova.compute.provider_tree [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 531.616761] env[62133]: DEBUG nova.compute.claims [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Aborting claim: {{(pid=62133) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 531.616889] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 531.675756] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835131, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.027705} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 531.677196] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] File moved {{(pid=62133) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 531.677196] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Cleaning up location [datastore1] vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104 {{(pid=62133) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 531.677196] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Deleting the datastore file [datastore1] vmware_temp/a3cc68ce-eda9-4375-bbbc-a15943506104 {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 531.677196] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27fe2285-1045-4a5c-a85f-ca52ee06e3d6 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 531.682804] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 531.682804] env[62133]: value = "task-835132" [ 531.682804] env[62133]: _type = "Task" [ 531.682804] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 531.693263] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835132, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 531.890649] env[62133]: DEBUG oslo_concurrency.lockutils [req-103455e9-7b60-4949-b2a8-6b93cfe9ff98 req-ebe609cb-7d12-476d-916b-f555121bc477 service nova] Releasing lock "refresh_cache-0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 531.890909] env[62133]: DEBUG nova.compute.manager [req-103455e9-7b60-4949-b2a8-6b93cfe9ff98 req-ebe609cb-7d12-476d-916b-f555121bc477 service nova] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Received event network-vif-deleted-73123aa0-496f-461c-95b2-f57a8fef14bf {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 531.962556] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Releasing lock "refresh_cache-6174840e-10a6-43ed-be9c-76602f91bf42" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 531.962611] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Updated the network info_cache for instance {{(pid=62133) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 531.962803] env[62133]: DEBUG oslo_service.periodic_task [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62133) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 531.963312] env[62133]: DEBUG oslo_service.periodic_task [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62133) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 531.963312] env[62133]: DEBUG oslo_service.periodic_task [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62133) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 531.964021] env[62133]: DEBUG oslo_service.periodic_task [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62133) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 531.964021] env[62133]: DEBUG oslo_service.periodic_task [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62133) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 531.964021] env[62133]: DEBUG oslo_service.periodic_task [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62133) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 532.010313] env[62133]: DEBUG nova.network.neutron [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.015207] 
env[62133]: DEBUG nova.network.neutron [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 532.162321] env[62133]: ERROR nova.scheduler.client.report [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [req-02b1c166-5e65-4c2c-b727-cf364c709808] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2bb88767-ae4a-4310-888a-696a7895e29b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-02b1c166-5e65-4c2c-b727-cf364c709808"}]}: nova.exception.PortBindingFailed: Binding failed for port 739d8700-7fd6-45a3-8c2a-c13a60475dc1, please check neutron logs for more information. [ 532.206177] env[62133]: DEBUG nova.scheduler.client.report [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Refreshing inventories for resource provider 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 532.209905] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835132, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023012} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.210905] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Deleted the datastore file {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 532.214100] env[62133]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a66ac43-8644-4d25-83ba-36a889e0dedd {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.220451] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 532.220451] env[62133]: value = "session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]5291d943-ef78-67ef-8ac2-a31da6ef6ed9" [ 532.220451] env[62133]: _type = "Task" [ 532.220451] env[62133]: } to complete. 
{{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.241627] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]5291d943-ef78-67ef-8ac2-a31da6ef6ed9, 'name': SearchDatastore_Task, 'duration_secs': 0.010019} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 532.243923] env[62133]: DEBUG nova.scheduler.client.report [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Updating ProviderTree inventory for provider 2bb88767-ae4a-4310-888a-696a7895e29b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 532.244377] env[62133]: DEBUG nova.compute.provider_tree [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 532.253638] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Releasing lock "[datastore1] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.253638] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk to [datastore1] 6174840e-10a6-43ed-be9c-76602f91bf42/6174840e-10a6-43ed-be9c-76602f91bf42.vmdk {{(pid=62133) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 532.253638] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b909eb5f-7671-401c-8e11-0d29f85131ac {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.270852] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 532.270852] 
env[62133]: value = "task-835133" [ 532.270852] env[62133]: _type = "Task" [ 532.270852] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 532.278676] env[62133]: DEBUG nova.network.neutron [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Successfully created port: aa3074f9-6c4f-46e2-8201-f9f0bdff1699 {{(pid=62133) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 532.292078] env[62133]: DEBUG nova.scheduler.client.report [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Refreshing aggregate associations for resource provider 2bb88767-ae4a-4310-888a-696a7895e29b, aggregates: None {{(pid=62133) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 532.300145] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835133, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.337022] env[62133]: DEBUG nova.scheduler.client.report [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Refreshing trait associations for resource provider 2bb88767-ae4a-4310-888a-696a7895e29b, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62133) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 532.470145] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Getting list of instances from cluster (obj){ [ 532.470145] env[62133]: value = "domain-c8" [ 532.470145] env[62133]: _type = "ClusterComputeResource" [ 532.470145] env[62133]: } {{(pid=62133) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 532.471547] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab0b2df-36d1-4c5a-b8b5-ee3dbe6d16c6 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.482967] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Got total of 1 instances {{(pid=62133) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 532.482967] env[62133]: WARNING nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] While synchronizing instance power states, found 6 instances in the database and 1 instances on the hypervisor. 
[ 532.483072] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Triggering sync for uuid 6174840e-10a6-43ed-be9c-76602f91bf42 {{(pid=62133) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 532.483699] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Triggering sync for uuid b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12 {{(pid=62133) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 532.483699] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Triggering sync for uuid 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30 {{(pid=62133) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 532.483699] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Triggering sync for uuid bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b {{(pid=62133) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 532.483699] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Triggering sync for uuid f8330803-064a-4f31-b769-19b0ce281e07 {{(pid=62133) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 532.483972] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Triggering sync for uuid f774f4eb-7329-4449-8567-b4c0dc8fd78a {{(pid=62133) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10336}} [ 532.484111] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Acquiring lock "6174840e-10a6-43ed-be9c-76602f91bf42" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.484325] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Lock "6174840e-10a6-43ed-be9c-76602f91bf42" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 532.484524] env[62133]: INFO nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] During sync_power_state the instance has a pending task (rebuild_spawning). Skip. 
[ 532.484706] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Lock "6174840e-10a6-43ed-be9c-76602f91bf42" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 532.485445] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Acquiring lock "b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.485445] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Acquiring lock "0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.485445] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Acquiring lock "bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.485603] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Acquiring lock "f8330803-064a-4f31-b769-19b0ce281e07" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.485672] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Acquiring lock "f774f4eb-7329-4449-8567-b4c0dc8fd78a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 532.485830] env[62133]: DEBUG oslo_service.periodic_task [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62133) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 532.486247] env[62133]: DEBUG nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62133) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10545}} [ 532.486539] env[62133]: DEBUG oslo_service.periodic_task [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62133) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 532.515284] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Releasing lock "refresh_cache-bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.515284] env[62133]: DEBUG nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Start destroying the instance on the hypervisor. {{(pid=62133) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 532.515284] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Destroying instance {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 532.515284] env[62133]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f46e500d-1eee-4696-bd25-77550f35e2e7 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.519740] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Releasing lock "refresh_cache-801672ea-1218-4c0a-9aae-d94ff1344731" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 532.520530] env[62133]: DEBUG nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62133) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 532.520530] env[62133]: DEBUG nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 532.520530] env[62133]: DEBUG nova.network.neutron [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 532.528114] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432d7e1a-b92a-40bd-a754-837ef1517661 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.553579] env[62133]: WARNING nova.virt.vmwareapi.vmops [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b could not be found. [ 532.553823] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Instance destroyed {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 532.554022] env[62133]: INFO nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 532.554428] env[62133]: DEBUG oslo.service.loopingcall [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 532.558558] env[62133]: DEBUG nova.network.neutron [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Instance cache missing network info. 
{{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 532.559932] env[62133]: DEBUG nova.compute.manager [-] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 532.560057] env[62133]: DEBUG nova.network.neutron [-] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 532.595610] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cd3cdb-e33c-4300-86d7-3ea1d60cbe16 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.603402] env[62133]: DEBUG nova.network.neutron [-] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 532.608810] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261786a6-79d1-476f-ab41-6af182786aa2 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.648072] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4af42a-af98-4930-b357-8dbe1a7ec451 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.657703] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5347d9-f185-4f5f-aa9f-0aaef6472b4b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 532.673287] env[62133]: DEBUG nova.compute.provider_tree [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 532.786392] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835133, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 532.993313] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.063247] env[62133]: DEBUG nova.network.neutron [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.107740] env[62133]: DEBUG nova.network.neutron [-] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 533.218807] env[62133]: DEBUG nova.scheduler.client.report [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Updated inventory for provider 2bb88767-ae4a-4310-888a-696a7895e29b with generation 15 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 533.219185] env[62133]: DEBUG nova.compute.provider_tree [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Updating resource provider 2bb88767-ae4a-4310-888a-696a7895e29b generation from 15 to 16 during operation: update_inventory {{(pid=62133) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 533.219879] env[62133]: DEBUG nova.compute.provider_tree [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 533.284200] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835133, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537486} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.284267] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk to [datastore1] 6174840e-10a6-43ed-be9c-76602f91bf42/6174840e-10a6-43ed-be9c-76602f91bf42.vmdk {{(pid=62133) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 533.285152] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Extending root virtual disk to 1048576 {{(pid=62133) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 533.285152] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d22097e1-6ee7-46e3-bfb0-352294d4387b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.291582] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 533.291582] env[62133]: value = "task-835134" [ 533.291582] env[62133]: _type = "Task" [ 533.291582] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 533.299800] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835134, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.565839] env[62133]: INFO nova.compute.manager [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] [instance: 801672ea-1218-4c0a-9aae-d94ff1344731] Took 1.05 seconds to deallocate network for instance. [ 533.610364] env[62133]: INFO nova.compute.manager [-] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Took 1.05 seconds to deallocate network for instance. 
[ 533.612922] env[62133]: DEBUG nova.compute.claims [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Aborting claim: {{(pid=62133) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 533.615213] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 533.728017] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.850s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 533.729025] env[62133]: ERROR nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 739d8700-7fd6-45a3-8c2a-c13a60475dc1, please check neutron logs for more information. [ 533.729025] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] Traceback (most recent call last): [ 533.729025] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 533.729025] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] self.driver.spawn(context, instance, image_meta, [ 533.729025] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 533.729025] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] self._vmops.spawn(context, instance, image_meta, injected_files, [ 533.729025] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 533.729025] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] vm_ref = self.build_virtual_machine(instance, [ 533.729025] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 533.729025] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] vif_infos = vmwarevif.get_vif_info(self._session, [ 533.729025] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 533.729410] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] for vif in network_info: [ 533.729410] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/model.py", line 612, in 
__iter__ [ 533.729410] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] return self._sync_wrapper(fn, *args, **kwargs) [ 533.729410] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 533.729410] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] self.wait() [ 533.729410] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 533.729410] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] self[:] = self._gt.wait() [ 533.729410] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 533.729410] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] return self._exit_event.wait() [ 533.729410] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 533.729410] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] result = hub.switch() [ 533.729410] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 533.729410] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] return self.greenlet.switch() [ 533.729757] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 533.729757] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] result = function(*args, **kwargs) [ 533.729757] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 533.729757] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] return func(*args, **kwargs) [ 533.729757] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 533.729757] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] raise e [ 533.729757] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 533.729757] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] nwinfo = self.network_api.allocate_for_instance( [ 533.729757] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 533.729757] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] created_port_ids = self._update_ports_for_instance( [ 533.729757] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 533.729757] 
env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] with excutils.save_and_reraise_exception(): [ 533.729757] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 533.730157] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] self.force_reraise() [ 533.730157] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 533.730157] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] raise self.value [ 533.730157] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 533.730157] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] updated_port = self._update_port( [ 533.730157] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 533.730157] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] _ensure_no_port_binding_failure(port) [ 533.730157] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 533.730157] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] raise exception.PortBindingFailed(port_id=port['id']) [ 533.730157] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] nova.exception.PortBindingFailed: Binding failed for port 739d8700-7fd6-45a3-8c2a-c13a60475dc1, please check neutron logs for more information. [ 533.730157] env[62133]: ERROR nova.compute.manager [instance: 1225f605-2c40-4628-aefe-93359b077b89] [ 533.730537] env[62133]: DEBUG nova.compute.utils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Binding failed for port 739d8700-7fd6-45a3-8c2a-c13a60475dc1, please check neutron logs for more information. 
{{(pid=62133) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 533.733606] env[62133]: DEBUG oslo_concurrency.lockutils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.589s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 533.733606] env[62133]: INFO nova.compute.claims [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 533.741194] env[62133]: DEBUG nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Build of instance 1225f605-2c40-4628-aefe-93359b077b89 was re-scheduled: Binding failed for port 739d8700-7fd6-45a3-8c2a-c13a60475dc1, please check neutron logs for more information. {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 533.742040] env[62133]: DEBUG nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Unplugging VIFs for instance {{(pid=62133) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 533.742322] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Acquiring lock "refresh_cache-1225f605-2c40-4628-aefe-93359b077b89" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.742481] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Acquired lock "refresh_cache-1225f605-2c40-4628-aefe-93359b077b89" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.742662] env[62133]: DEBUG nova.network.neutron [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 533.804222] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835134, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063594} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 533.805198] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Extended root virtual disk {{(pid=62133) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 533.806026] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061454f4-0373-4063-a2a9-6c938e7b6a1d {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.829135] env[62133]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 6174840e-10a6-43ed-be9c-76602f91bf42/6174840e-10a6-43ed-be9c-76602f91bf42.vmdk or device None with type sparse {{(pid=62133) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 533.829791] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fbe7636-63ab-4237-8d7b-75c81807ca21 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 533.850927] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 533.850927] env[62133]: value = "task-835135" [ 533.850927] env[62133]: _type = "Task" [ 533.850927] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 533.860174] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835135, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 533.964178] env[62133]: DEBUG nova.compute.manager [req-a3272f50-02e2-45af-9111-0ade36e3e30e req-ef1bfd29-b710-4716-8149-24878bef3947 service nova] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Received event network-changed-34081212-a7c2-42e3-b868-a211575a9557 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 533.964178] env[62133]: DEBUG nova.compute.manager [req-a3272f50-02e2-45af-9111-0ade36e3e30e req-ef1bfd29-b710-4716-8149-24878bef3947 service nova] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Refreshing instance network info cache due to event network-changed-34081212-a7c2-42e3-b868-a211575a9557. 
{{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 533.964178] env[62133]: DEBUG oslo_concurrency.lockutils [req-a3272f50-02e2-45af-9111-0ade36e3e30e req-ef1bfd29-b710-4716-8149-24878bef3947 service nova] Acquiring lock "refresh_cache-f8330803-064a-4f31-b769-19b0ce281e07" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.964178] env[62133]: DEBUG oslo_concurrency.lockutils [req-a3272f50-02e2-45af-9111-0ade36e3e30e req-ef1bfd29-b710-4716-8149-24878bef3947 service nova] Acquired lock "refresh_cache-f8330803-064a-4f31-b769-19b0ce281e07" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.964178] env[62133]: DEBUG nova.network.neutron [req-a3272f50-02e2-45af-9111-0ade36e3e30e req-ef1bfd29-b710-4716-8149-24878bef3947 service nova] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Refreshing network info cache for port 34081212-a7c2-42e3-b868-a211575a9557 {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 533.997904] env[62133]: DEBUG nova.compute.manager [req-69eb8fb0-6a1e-4bd0-8c73-4fb0cfa3c4da req-a3fdbab1-fe42-410e-804a-2a0322662fef service nova] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Received event network-changed-a38e9eb9-5c90-4a44-9cf0-1f5f93036483 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 533.997904] env[62133]: DEBUG nova.compute.manager [req-69eb8fb0-6a1e-4bd0-8c73-4fb0cfa3c4da req-a3fdbab1-fe42-410e-804a-2a0322662fef service nova] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Refreshing instance network info cache due to event network-changed-a38e9eb9-5c90-4a44-9cf0-1f5f93036483. {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 533.997904] env[62133]: DEBUG oslo_concurrency.lockutils [req-69eb8fb0-6a1e-4bd0-8c73-4fb0cfa3c4da req-a3fdbab1-fe42-410e-804a-2a0322662fef service nova] Acquiring lock "refresh_cache-bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 533.997904] env[62133]: DEBUG oslo_concurrency.lockutils [req-69eb8fb0-6a1e-4bd0-8c73-4fb0cfa3c4da req-a3fdbab1-fe42-410e-804a-2a0322662fef service nova] Acquired lock "refresh_cache-bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 533.997904] env[62133]: DEBUG nova.network.neutron [req-69eb8fb0-6a1e-4bd0-8c73-4fb0cfa3c4da req-a3fdbab1-fe42-410e-804a-2a0322662fef service nova] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Refreshing network info cache for port a38e9eb9-5c90-4a44-9cf0-1f5f93036483 {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 534.244937] env[62133]: ERROR nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 34081212-a7c2-42e3-b868-a211575a9557, please check neutron logs for more information. 
[ 534.244937] env[62133]: ERROR nova.compute.manager Traceback (most recent call last): [ 534.244937] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 534.244937] env[62133]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 534.244937] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 534.244937] env[62133]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 534.244937] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 534.244937] env[62133]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 534.244937] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.244937] env[62133]: ERROR nova.compute.manager self.force_reraise() [ 534.244937] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.244937] env[62133]: ERROR nova.compute.manager raise self.value [ 534.244937] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 534.244937] env[62133]: ERROR nova.compute.manager updated_port = self._update_port( [ 534.244937] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.244937] env[62133]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 534.245504] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.245504] env[62133]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 534.245504] env[62133]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 34081212-a7c2-42e3-b868-a211575a9557, please check neutron logs for more information. 
[ 534.245504] env[62133]: ERROR nova.compute.manager [ 534.245504] env[62133]: Traceback (most recent call last): [ 534.245504] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 534.245504] env[62133]: listener.cb(fileno) [ 534.245504] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 534.245504] env[62133]: result = function(*args, **kwargs) [ 534.245504] env[62133]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 534.245504] env[62133]: return func(*args, **kwargs) [ 534.245504] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 534.245504] env[62133]: raise e [ 534.245504] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 534.245504] env[62133]: nwinfo = self.network_api.allocate_for_instance( [ 534.245504] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 534.245504] env[62133]: created_port_ids = self._update_ports_for_instance( [ 534.245504] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 534.245504] env[62133]: with excutils.save_and_reraise_exception(): [ 534.245504] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.245504] env[62133]: self.force_reraise() [ 534.245504] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.245504] env[62133]: raise self.value [ 534.245504] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 534.245504] env[62133]: updated_port = self._update_port( [ 534.245504] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.245504] env[62133]: _ensure_no_port_binding_failure(port) [ 534.245504] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.245504] env[62133]: raise exception.PortBindingFailed(port_id=port['id']) [ 534.246253] env[62133]: nova.exception.PortBindingFailed: Binding failed for port 34081212-a7c2-42e3-b868-a211575a9557, please check neutron logs for more information. [ 534.246253] env[62133]: Removing descriptor: 19 [ 534.248599] env[62133]: ERROR nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 34081212-a7c2-42e3-b868-a211575a9557, please check neutron logs for more information. 
[ 534.248599] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] Traceback (most recent call last): [ 534.248599] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 534.248599] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] yield resources [ 534.248599] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 534.248599] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] self.driver.spawn(context, instance, image_meta, [ 534.248599] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 534.248599] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] self._vmops.spawn(context, instance, image_meta, injected_files, [ 534.248599] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 534.248599] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] vm_ref = self.build_virtual_machine(instance, [ 534.248599] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 534.248987] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] vif_infos = vmwarevif.get_vif_info(self._session, [ 534.248987] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 534.248987] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] for vif in network_info: [ 534.248987] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 534.248987] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] return self._sync_wrapper(fn, *args, **kwargs) [ 534.248987] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 534.248987] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] self.wait() [ 534.248987] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 534.248987] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] self[:] = self._gt.wait() [ 534.248987] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 534.248987] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] return self._exit_event.wait() [ 534.248987] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 534.248987] env[62133]: ERROR 
nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] result = hub.switch() [ 534.249363] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 534.249363] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] return self.greenlet.switch() [ 534.249363] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 534.249363] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] result = function(*args, **kwargs) [ 534.249363] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 534.249363] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] return func(*args, **kwargs) [ 534.249363] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 534.249363] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] raise e [ 534.249363] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 534.249363] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] nwinfo = self.network_api.allocate_for_instance( [ 534.249363] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 534.249363] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] created_port_ids = self._update_ports_for_instance( [ 534.249363] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 534.249722] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] with excutils.save_and_reraise_exception(): [ 534.249722] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 534.249722] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] self.force_reraise() [ 534.249722] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 534.249722] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] raise self.value [ 534.249722] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 534.249722] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] updated_port = self._update_port( [ 534.249722] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 534.249722] 
env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] _ensure_no_port_binding_failure(port) [ 534.249722] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 534.249722] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] raise exception.PortBindingFailed(port_id=port['id']) [ 534.249722] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] nova.exception.PortBindingFailed: Binding failed for port 34081212-a7c2-42e3-b868-a211575a9557, please check neutron logs for more information. [ 534.249722] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] [ 534.250082] env[62133]: INFO nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Terminating instance [ 534.250765] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Acquiring lock "refresh_cache-f8330803-064a-4f31-b769-19b0ce281e07" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 534.314751] env[62133]: DEBUG nova.network.neutron [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 534.366497] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835135, 'name': ReconfigVM_Task, 'duration_secs': 0.287988} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 534.366905] env[62133]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 6174840e-10a6-43ed-be9c-76602f91bf42/6174840e-10a6-43ed-be9c-76602f91bf42.vmdk or device None with type sparse {{(pid=62133) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 534.367484] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-63ef81ef-86b6-45e4-91e6-a547838179ae {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.373904] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 534.373904] env[62133]: value = "task-835136" [ 534.373904] env[62133]: _type = "Task" [ 534.373904] env[62133]: } to complete. 
{{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.382917] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835136, 'name': Rename_Task} progress is 5%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.501676] env[62133]: DEBUG nova.network.neutron [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.514442] env[62133]: DEBUG nova.network.neutron [req-a3272f50-02e2-45af-9111-0ade36e3e30e req-ef1bfd29-b710-4716-8149-24878bef3947 service nova] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 534.565104] env[62133]: DEBUG nova.network.neutron [req-69eb8fb0-6a1e-4bd0-8c73-4fb0cfa3c4da req-a3fdbab1-fe42-410e-804a-2a0322662fef service nova] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 534.613096] env[62133]: INFO nova.scheduler.client.report [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Deleted allocations for instance 801672ea-1218-4c0a-9aae-d94ff1344731 [ 534.704685] env[62133]: DEBUG nova.network.neutron [req-a3272f50-02e2-45af-9111-0ade36e3e30e req-ef1bfd29-b710-4716-8149-24878bef3947 service nova] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 534.887480] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835136, 'name': Rename_Task, 'duration_secs': 0.131672} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 534.887869] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Powering on the VM {{(pid=62133) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 534.891829] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5580041-1d6e-4672-b44f-081c76e219e3 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.900801] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 534.900801] env[62133]: value = "task-835137" [ 534.900801] env[62133]: _type = "Task" [ 534.900801] env[62133]: } to complete. 
{{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 534.914730] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835137, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 534.973297] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5f6b48-e401-43e4-be55-f61df9d20877 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 534.981490] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496124ed-95b0-4031-8a47-94c66b6886c1 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.017416] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Releasing lock "refresh_cache-1225f605-2c40-4628-aefe-93359b077b89" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.017726] env[62133]: DEBUG nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62133) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 535.017858] env[62133]: DEBUG nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 535.017995] env[62133]: DEBUG nova.network.neutron [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 535.020957] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6560d659-664c-49e7-a681-46a4e633ad5d {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.032308] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6fa0246-eb10-4293-a68e-4fa62d8d26ff {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.037691] env[62133]: DEBUG nova.network.neutron [req-69eb8fb0-6a1e-4bd0-8c73-4fb0cfa3c4da req-a3fdbab1-fe42-410e-804a-2a0322662fef service nova] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.050684] env[62133]: DEBUG nova.compute.provider_tree [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 
tempest-ServerDiagnosticsV248Test-1326051369-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 535.085773] env[62133]: DEBUG nova.network.neutron [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 535.125730] env[62133]: DEBUG oslo_concurrency.lockutils [None req-072c0b3c-1ffb-4648-864b-2bb752d6ea06 tempest-InstanceActionsNegativeTestJSON-1145370140 tempest-InstanceActionsNegativeTestJSON-1145370140-project-member] Lock "801672ea-1218-4c0a-9aae-d94ff1344731" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.233s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 535.208031] env[62133]: DEBUG oslo_concurrency.lockutils [req-a3272f50-02e2-45af-9111-0ade36e3e30e req-ef1bfd29-b710-4716-8149-24878bef3947 service nova] Releasing lock "refresh_cache-f8330803-064a-4f31-b769-19b0ce281e07" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.208676] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Acquired lock "refresh_cache-f8330803-064a-4f31-b769-19b0ce281e07" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 535.208753] env[62133]: DEBUG nova.network.neutron [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 535.415492] env[62133]: DEBUG oslo_vmware.api [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835137, 'name': PowerOnVM_Task, 'duration_secs': 0.447358} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 535.415821] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Powered on the VM {{(pid=62133) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 535.415992] env[62133]: DEBUG nova.compute.manager [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Checking state {{(pid=62133) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 535.416886] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8e10f1-f9a8-48db-b038-2e06c023607d {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 535.542724] env[62133]: DEBUG oslo_concurrency.lockutils [req-69eb8fb0-6a1e-4bd0-8c73-4fb0cfa3c4da req-a3fdbab1-fe42-410e-804a-2a0322662fef service nova] Releasing lock "refresh_cache-bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 535.542825] env[62133]: DEBUG nova.compute.manager [req-69eb8fb0-6a1e-4bd0-8c73-4fb0cfa3c4da req-a3fdbab1-fe42-410e-804a-2a0322662fef service nova] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Received event network-vif-deleted-a38e9eb9-5c90-4a44-9cf0-1f5f93036483 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 535.555196] env[62133]: DEBUG nova.scheduler.client.report [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 535.590375] env[62133]: DEBUG nova.network.neutron [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 535.941174] env[62133]: DEBUG nova.network.neutron [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Instance cache missing network info. 
{{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 535.947024] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.060591] env[62133]: DEBUG oslo_concurrency.lockutils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 536.061112] env[62133]: DEBUG nova.compute.manager [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Start building networks asynchronously for instance. {{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 536.064485] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.998s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 536.095398] env[62133]: INFO nova.compute.manager [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] [instance: 1225f605-2c40-4628-aefe-93359b077b89] Took 1.08 seconds to deallocate network for instance. [ 536.238491] env[62133]: DEBUG nova.network.neutron [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 536.571740] env[62133]: DEBUG nova.compute.utils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Using /dev/sd instead of None {{(pid=62133) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 536.581355] env[62133]: DEBUG nova.compute.manager [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Not allocating networking since 'none' was specified. 
{{(pid=62133) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 536.744115] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Releasing lock "refresh_cache-f8330803-064a-4f31-b769-19b0ce281e07" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 536.744115] env[62133]: DEBUG nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Start destroying the instance on the hypervisor. {{(pid=62133) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 536.744115] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Destroying instance {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 536.744115] env[62133]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-775e39c3-fefd-4fed-9ba7-c5d27737850c {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.757119] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad03b457-8601-4732-b6e4-7ebd6d89740a {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.776821] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d69b66-ad1b-4116-8702-4a28ad43eb8d {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.787222] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b24abd4-c010-497d-8e1e-209f8051c40a {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.798601] env[62133]: WARNING nova.virt.vmwareapi.vmops [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f8330803-064a-4f31-b769-19b0ce281e07 could not be found. [ 536.798601] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Instance destroyed {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 536.798601] env[62133]: INFO nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Took 0.06 seconds to destroy the instance on the hypervisor. 
[ 536.798601] env[62133]: DEBUG oslo.service.loopingcall [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 536.798601] env[62133]: DEBUG nova.compute.manager [-] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 536.798601] env[62133]: DEBUG nova.network.neutron [-] [instance: f8330803-064a-4f31-b769-19b0ce281e07] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 536.830399] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e36e91b-3426-4055-b383-6e495a768a9f {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.838767] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c800f73f-5d70-4cc8-aca8-4d744088b6ab {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.853086] env[62133]: DEBUG nova.compute.provider_tree [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 536.909170] env[62133]: DEBUG nova.network.neutron [-] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 536.969682] env[62133]: DEBUG oslo_concurrency.lockutils [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Acquiring lock "2fbc4d53-a000-4f14-a4b7-087aee2129bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 536.970022] env[62133]: DEBUG oslo_concurrency.lockutils [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Lock "2fbc4d53-a000-4f14-a4b7-087aee2129bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.085453] env[62133]: DEBUG nova.compute.manager [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Start building block device mappings for instance. 
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 537.134044] env[62133]: INFO nova.scheduler.client.report [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Deleted allocations for instance 1225f605-2c40-4628-aefe-93359b077b89 [ 537.358077] env[62133]: DEBUG nova.scheduler.client.report [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 537.416493] env[62133]: DEBUG nova.network.neutron [-] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 537.473636] env[62133]: DEBUG nova.compute.manager [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Starting instance... {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 537.642673] env[62133]: DEBUG oslo_concurrency.lockutils [None req-dd99a8a3-a5ab-48df-834c-ef7613150965 tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Lock "1225f605-2c40-4628-aefe-93359b077b89" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.274s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.864441] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.800s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.865126] env[62133]: ERROR nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f8d4902d-559e-44bc-b68f-8826931b4441, please check neutron logs for more information. 
[ 537.865126] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Traceback (most recent call last): [ 537.865126] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 537.865126] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] self.driver.spawn(context, instance, image_meta, [ 537.865126] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 537.865126] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] self._vmops.spawn(context, instance, image_meta, injected_files, [ 537.865126] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 537.865126] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] vm_ref = self.build_virtual_machine(instance, [ 537.865126] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 537.865126] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] vif_infos = vmwarevif.get_vif_info(self._session, [ 537.865126] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 537.865523] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] for vif in network_info: [ 537.865523] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 537.865523] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] return self._sync_wrapper(fn, *args, **kwargs) [ 537.865523] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 537.865523] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] self.wait() [ 537.865523] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 537.865523] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] self[:] = self._gt.wait() [ 537.865523] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 537.865523] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] return self._exit_event.wait() [ 537.865523] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 537.865523] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] result = hub.switch() [ 537.865523] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
537.865523] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] return self.greenlet.switch() [ 537.865876] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 537.865876] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] result = function(*args, **kwargs) [ 537.865876] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 537.865876] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] return func(*args, **kwargs) [ 537.865876] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 537.865876] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] raise e [ 537.865876] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 537.865876] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] nwinfo = self.network_api.allocate_for_instance( [ 537.865876] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 537.865876] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] created_port_ids = self._update_ports_for_instance( [ 537.865876] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 537.865876] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] with excutils.save_and_reraise_exception(): [ 537.865876] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 537.866247] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] self.force_reraise() [ 537.866247] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 537.866247] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] raise self.value [ 537.866247] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 537.866247] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] updated_port = self._update_port( [ 537.866247] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 537.866247] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] _ensure_no_port_binding_failure(port) [ 537.866247] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 537.866247] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] raise exception.PortBindingFailed(port_id=port['id']) [ 537.866247] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] nova.exception.PortBindingFailed: Binding failed for port f8d4902d-559e-44bc-b68f-8826931b4441, please check neutron logs for more information. [ 537.866247] env[62133]: ERROR nova.compute.manager [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] [ 537.866579] env[62133]: DEBUG nova.compute.utils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Binding failed for port f8d4902d-559e-44bc-b68f-8826931b4441, please check neutron logs for more information. {{(pid=62133) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 537.872294] env[62133]: DEBUG oslo_concurrency.lockutils [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.927s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.873354] env[62133]: INFO nova.compute.claims [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 537.876932] env[62133]: DEBUG nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Build of instance b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12 was re-scheduled: Binding failed for port f8d4902d-559e-44bc-b68f-8826931b4441, please check neutron logs for more information. 
{{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 537.877460] env[62133]: DEBUG nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Unplugging VIFs for instance {{(pid=62133) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 537.877685] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Acquiring lock "refresh_cache-b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 537.877827] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Acquired lock "refresh_cache-b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 537.877978] env[62133]: DEBUG nova.network.neutron [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 537.918559] env[62133]: INFO nova.compute.manager [-] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Took 1.12 seconds to deallocate network for instance. [ 537.921730] env[62133]: DEBUG nova.compute.claims [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Aborting claim: {{(pid=62133) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 537.921943] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.000396] env[62133]: DEBUG oslo_concurrency.lockutils [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.093705] env[62133]: DEBUG nova.compute.manager [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Start spawning the instance on the hypervisor. 
{{(pid=62133) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 538.130117] env[62133]: DEBUG nova.virt.hardware [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 538.130704] env[62133]: DEBUG nova.virt.hardware [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 538.130704] env[62133]: DEBUG nova.virt.hardware [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 538.130704] env[62133]: DEBUG nova.virt.hardware [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 538.130868] env[62133]: DEBUG nova.virt.hardware [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 538.131346] env[62133]: DEBUG nova.virt.hardware [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 538.134347] env[62133]: DEBUG nova.virt.hardware [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 538.134590] env[62133]: DEBUG nova.virt.hardware [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
538.134809] env[62133]: DEBUG nova.virt.hardware [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 538.134983] env[62133]: DEBUG nova.virt.hardware [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 538.135131] env[62133]: DEBUG nova.virt.hardware [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 538.136242] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af490a5-2587-4753-a991-3f6b13a596d6 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.148161] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65d8d45-a630-4a60-ab96-9452ef752be9 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.167983] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Instance VIF info [] {{(pid=62133) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 538.174597] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Creating folder: Project (c714c34befc1408c8f4d8a431ac95850). Parent ref: group-v192832. 
{{(pid=62133) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 538.176183] env[62133]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9629285-a977-4c57-9758-3b09c8987c58 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.184021] env[62133]: DEBUG oslo_concurrency.lockutils [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Acquiring lock "0773334e-8541-439e-aa0c-83dacac16242" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.184021] env[62133]: DEBUG oslo_concurrency.lockutils [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Lock "0773334e-8541-439e-aa0c-83dacac16242" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.198023] env[62133]: INFO nova.virt.vmwareapi.vm_util [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Created folder: Project (c714c34befc1408c8f4d8a431ac95850) in parent group-v192832. [ 538.198023] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Creating folder: Instances. Parent ref: group-v192837. {{(pid=62133) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 538.198023] env[62133]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05cdb482-2227-4658-8280-55cbb7821af2 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.206554] env[62133]: INFO nova.virt.vmwareapi.vm_util [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Created folder: Instances in parent group-v192837. [ 538.206799] env[62133]: DEBUG oslo.service.loopingcall [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 538.206986] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Creating VM on the ESX host {{(pid=62133) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 538.207210] env[62133]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82be096d-14be-4a6f-9b56-e712de989720 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.233574] env[62133]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 538.233574] env[62133]: value = "task-835140" [ 538.233574] env[62133]: _type = "Task" [ 538.233574] env[62133]: } to complete. 
{{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 538.244336] env[62133]: DEBUG oslo_vmware.api [-] Task: {'id': task-835140, 'name': CreateVM_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 538.257499] env[62133]: ERROR nova.compute.manager [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port aa3074f9-6c4f-46e2-8201-f9f0bdff1699, please check neutron logs for more information. [ 538.257499] env[62133]: ERROR nova.compute.manager Traceback (most recent call last): [ 538.257499] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 538.257499] env[62133]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 538.257499] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 538.257499] env[62133]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 538.257499] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 538.257499] env[62133]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 538.257499] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 538.257499] env[62133]: ERROR nova.compute.manager self.force_reraise() [ 538.257499] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 538.257499] env[62133]: ERROR nova.compute.manager raise self.value [ 538.257499] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 538.257499] env[62133]: ERROR nova.compute.manager updated_port = self._update_port( [ 538.257499] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 538.257499] env[62133]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 538.258254] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 538.258254] env[62133]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 538.258254] env[62133]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port aa3074f9-6c4f-46e2-8201-f9f0bdff1699, please check neutron logs for more information. 
[ 538.258254] env[62133]: ERROR nova.compute.manager [ 538.258254] env[62133]: Traceback (most recent call last): [ 538.258254] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 538.258254] env[62133]: listener.cb(fileno) [ 538.258254] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 538.258254] env[62133]: result = function(*args, **kwargs) [ 538.258254] env[62133]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 538.258254] env[62133]: return func(*args, **kwargs) [ 538.258254] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 538.258254] env[62133]: raise e [ 538.258254] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 538.258254] env[62133]: nwinfo = self.network_api.allocate_for_instance( [ 538.258254] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 538.258254] env[62133]: created_port_ids = self._update_ports_for_instance( [ 538.258254] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 538.258254] env[62133]: with excutils.save_and_reraise_exception(): [ 538.258254] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 538.258254] env[62133]: self.force_reraise() [ 538.258254] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 538.258254] env[62133]: raise self.value [ 538.258254] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 538.258254] env[62133]: updated_port = self._update_port( [ 538.258254] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 538.258254] env[62133]: _ensure_no_port_binding_failure(port) [ 538.258254] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 538.258254] env[62133]: raise exception.PortBindingFailed(port_id=port['id']) [ 538.259064] env[62133]: nova.exception.PortBindingFailed: Binding failed for port aa3074f9-6c4f-46e2-8201-f9f0bdff1699, please check neutron logs for more information. [ 538.259064] env[62133]: Removing descriptor: 15 [ 538.259064] env[62133]: ERROR nova.compute.manager [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port aa3074f9-6c4f-46e2-8201-f9f0bdff1699, please check neutron logs for more information. 
[ 538.259064] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Traceback (most recent call last): [ 538.259064] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 538.259064] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] yield resources [ 538.259064] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 538.259064] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] self.driver.spawn(context, instance, image_meta, [ 538.259064] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 538.259064] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 538.259064] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 538.259064] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] vm_ref = self.build_virtual_machine(instance, [ 538.259435] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 538.259435] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] vif_infos = vmwarevif.get_vif_info(self._session, [ 538.259435] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 538.259435] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] for vif in network_info: [ 538.259435] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 538.259435] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] return self._sync_wrapper(fn, *args, **kwargs) [ 538.259435] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 538.259435] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] self.wait() [ 538.259435] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 538.259435] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] self[:] = self._gt.wait() [ 538.259435] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 538.259435] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] return self._exit_event.wait() [ 538.259435] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 538.259754] env[62133]: ERROR 
nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] result = hub.switch() [ 538.259754] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 538.259754] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] return self.greenlet.switch() [ 538.259754] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 538.259754] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] result = function(*args, **kwargs) [ 538.259754] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 538.259754] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] return func(*args, **kwargs) [ 538.259754] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 538.259754] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] raise e [ 538.259754] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 538.259754] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] nwinfo = self.network_api.allocate_for_instance( [ 538.259754] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 538.259754] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] created_port_ids = self._update_ports_for_instance( [ 538.263920] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 538.263920] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] with excutils.save_and_reraise_exception(): [ 538.263920] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 538.263920] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] self.force_reraise() [ 538.263920] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 538.263920] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] raise self.value [ 538.263920] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 538.263920] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] updated_port = self._update_port( [ 538.263920] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 538.263920] 
env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] _ensure_no_port_binding_failure(port) [ 538.263920] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 538.263920] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] raise exception.PortBindingFailed(port_id=port['id']) [ 538.264413] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] nova.exception.PortBindingFailed: Binding failed for port aa3074f9-6c4f-46e2-8201-f9f0bdff1699, please check neutron logs for more information. [ 538.264413] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] [ 538.264413] env[62133]: INFO nova.compute.manager [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Terminating instance [ 538.264413] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Acquiring lock "refresh_cache-f774f4eb-7329-4449-8567-b4c0dc8fd78a" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 538.264413] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Acquired lock "refresh_cache-f774f4eb-7329-4449-8567-b4c0dc8fd78a" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 538.264413] env[62133]: DEBUG nova.network.neutron [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 538.463217] env[62133]: DEBUG nova.network.neutron [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 538.633098] env[62133]: DEBUG nova.compute.manager [req-87258f2c-3a03-477d-a19b-af49e5e29bd3 req-9b846426-148d-4c83-8d71-60e37af136b3 service nova] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Received event network-vif-deleted-34081212-a7c2-42e3-b868-a211575a9557 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 538.690928] env[62133]: DEBUG nova.compute.manager [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] [instance: 0773334e-8541-439e-aa0c-83dacac16242] Starting instance... 
{{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 538.700253] env[62133]: DEBUG nova.network.neutron [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 538.751136] env[62133]: DEBUG oslo_vmware.api [-] Task: {'id': task-835140, 'name': CreateVM_Task, 'duration_secs': 0.272847} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 538.751362] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Created VM on the ESX host {{(pid=62133) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 538.751772] env[62133]: DEBUG oslo_concurrency.lockutils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 538.751942] env[62133]: DEBUG oslo_concurrency.lockutils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 538.752291] env[62133]: DEBUG oslo_concurrency.lockutils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 538.752661] env[62133]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b068a045-b2a0-4002-99f7-e6415bbdc61a {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.758189] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Waiting for the task: (returnval){ [ 538.758189] env[62133]: value = "session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52b33c0c-6d7a-f986-696a-10702ff2ae89" [ 538.758189] env[62133]: _type = "Task" [ 538.758189] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 538.769747] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52b33c0c-6d7a-f986-696a-10702ff2ae89, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 538.944098] env[62133]: DEBUG nova.network.neutron [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 539.100452] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf88c1b-c563-42ab-8443-49d7e3ed4a6a {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.111965] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b876a132-9795-4f86-b98a-a6900f5524b3 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.152653] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e731b11c-c9f2-4b64-9641-466c1c26379b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.163990] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a70201b-6ec7-49f5-8882-c02fc724ca5b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.178059] env[62133]: DEBUG nova.compute.provider_tree [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 539.195603] env[62133]: DEBUG nova.network.neutron [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.202224] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Releasing lock "refresh_cache-b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.202445] env[62133]: DEBUG nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62133) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 539.202645] env[62133]: DEBUG nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 539.202818] env[62133]: DEBUG nova.network.neutron [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 539.216619] env[62133]: DEBUG oslo_concurrency.lockutils [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.255306] env[62133]: DEBUG nova.network.neutron [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 539.269744] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52b33c0c-6d7a-f986-696a-10702ff2ae89, 'name': SearchDatastore_Task, 'duration_secs': 0.036402} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 539.270824] env[62133]: DEBUG oslo_concurrency.lockutils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.271123] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Processing image 0d8cd420-ac77-4102-b0d0-bf339970e2ce {{(pid=62133) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 539.271368] env[62133]: DEBUG oslo_concurrency.lockutils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 539.271541] env[62133]: DEBUG oslo_concurrency.lockutils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Acquired lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 539.271681] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62133) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 539.272598] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7dda4d94-02b3-4b6b-9937-75b9bb301f2f {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.286065] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62133) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 539.286338] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62133) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 539.287185] env[62133]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f64d9dcf-a189-4ada-9e4f-fc21cfea34cd {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.298551] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Waiting for the task: (returnval){ [ 539.298551] env[62133]: value = "session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]5243e327-c63d-9b0d-f749-0890af55983f" [ 539.298551] env[62133]: _type = "Task" [ 539.298551] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 539.315513] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]5243e327-c63d-9b0d-f749-0890af55983f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 539.681194] env[62133]: DEBUG nova.scheduler.client.report [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 539.701562] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Releasing lock "refresh_cache-f774f4eb-7329-4449-8567-b4c0dc8fd78a" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 539.701562] env[62133]: DEBUG nova.compute.manager [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Start destroying the instance on the hypervisor. 
{{(pid=62133) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 539.701562] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Destroying instance {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 539.701562] env[62133]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5c9e32af-5f87-47ac-b19d-5df4a44ab355 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.713204] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd84dab4-2584-4d1d-9304-e408c67f87bd {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.745558] env[62133]: WARNING nova.virt.vmwareapi.vmops [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f774f4eb-7329-4449-8567-b4c0dc8fd78a could not be found. [ 539.745873] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Instance destroyed {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 539.745960] env[62133]: INFO nova.compute.manager [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 539.746222] env[62133]: DEBUG oslo.service.loopingcall [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 539.746474] env[62133]: DEBUG nova.compute.manager [-] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 539.746568] env[62133]: DEBUG nova.network.neutron [-] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 539.759849] env[62133]: DEBUG nova.network.neutron [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 539.805659] env[62133]: DEBUG nova.network.neutron [-] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Instance cache missing network info. 
{{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 539.817342] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]5243e327-c63d-9b0d-f749-0890af55983f, 'name': SearchDatastore_Task, 'duration_secs': 0.012642} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 539.818782] env[62133]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9216c46b-9de2-47d0-8730-76f6c7d6e192 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.826155] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Waiting for the task: (returnval){ [ 539.826155] env[62133]: value = "session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52c642bf-0bef-5e41-269b-3ac72bd1e996" [ 539.826155] env[62133]: _type = "Task" [ 539.826155] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 539.838884] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52c642bf-0bef-5e41-269b-3ac72bd1e996, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.188942] env[62133]: DEBUG oslo_concurrency.lockutils [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.322s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.190545] env[62133]: DEBUG nova.compute.manager [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Start building networks asynchronously for instance. 
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 540.197043] env[62133]: DEBUG oslo_concurrency.lockutils [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.248s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.201020] env[62133]: INFO nova.compute.claims [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 540.262760] env[62133]: INFO nova.compute.manager [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] Took 1.06 seconds to deallocate network for instance. [ 540.308923] env[62133]: DEBUG nova.network.neutron [-] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 540.341496] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52c642bf-0bef-5e41-269b-3ac72bd1e996, 'name': SearchDatastore_Task, 'duration_secs': 0.024085} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 540.341753] env[62133]: DEBUG oslo_concurrency.lockutils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Releasing lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 540.342316] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk to [datastore2] 3410b0c4-da88-4146-bb50-9b5a9a5d0d87/3410b0c4-da88-4146-bb50-9b5a9a5d0d87.vmdk {{(pid=62133) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 540.342316] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45490bfa-9079-4ae0-b886-04da24a369d8 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 540.354261] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Waiting for the task: (returnval){ [ 540.354261] env[62133]: value = "task-835141" [ 540.354261] env[62133]: _type = "Task" [ 540.354261] env[62133]: } to complete. 
{{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 540.364074] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': task-835141, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.706352] env[62133]: DEBUG nova.compute.utils [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Using /dev/sd instead of None {{(pid=62133) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 540.714746] env[62133]: DEBUG nova.compute.manager [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Allocating IP information in the background. {{(pid=62133) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 540.715028] env[62133]: DEBUG nova.network.neutron [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] allocate_for_instance() {{(pid=62133) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 540.814176] env[62133]: INFO nova.compute.manager [-] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Took 1.07 seconds to deallocate network for instance. [ 540.818804] env[62133]: DEBUG nova.compute.claims [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Aborting claim: {{(pid=62133) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 540.820041] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.865564] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': task-835141, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 540.946087] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b0ab7f0a-9616-4d7a-ba1f-d1c49041e791 tempest-InstanceActionsTestJSON-1669720906 tempest-InstanceActionsTestJSON-1669720906-project-member] Acquiring lock "f62a3436-a4a7-42a2-8023-913319df1d47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.946319] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b0ab7f0a-9616-4d7a-ba1f-d1c49041e791 tempest-InstanceActionsTestJSON-1669720906 tempest-InstanceActionsTestJSON-1669720906-project-member] Lock "f62a3436-a4a7-42a2-8023-913319df1d47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.112861] env[62133]: DEBUG nova.policy [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db1159a71f0144b599db8133b8ba4968', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e8453570b4054e0e86220327f0ae1865', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62133) authorize /opt/stack/nova/nova/policy.py:203}} [ 541.216102] env[62133]: DEBUG nova.compute.manager [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Start building block device mappings for instance. {{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 541.304019] env[62133]: INFO nova.scheduler.client.report [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Deleted allocations for instance b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12 [ 541.371370] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': task-835141, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523039} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.371638] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk to [datastore2] 3410b0c4-da88-4146-bb50-9b5a9a5d0d87/3410b0c4-da88-4146-bb50-9b5a9a5d0d87.vmdk {{(pid=62133) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 541.373024] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Extending root virtual disk to 1048576 {{(pid=62133) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 541.373024] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e5a2a1b8-4815-48d1-ba9d-8bc95e50d6e0 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.378987] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Waiting for the task: (returnval){ [ 541.378987] env[62133]: value = "task-835142" [ 541.378987] env[62133]: _type = "Task" [ 541.378987] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.389395] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': task-835142, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 541.481155] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34609fbb-d9e1-4cb9-8c4d-f1b88cfb5d5b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.489648] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d92743d-cc08-41a5-a9b7-6ababa9781cb {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.527201] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe90659b-b67a-4afe-8609-867b54f96a73 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.536491] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456eff60-d83b-4f53-8cb8-2bebaac46a01 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.553104] env[62133]: DEBUG nova.compute.provider_tree [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 541.815997] env[62133]: DEBUG oslo_concurrency.lockutils [None req-c4da42a9-7c9b-4503-b09e-3ef3d0c1c5ec tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Lock "b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.221s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 541.818625] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Lock "b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 9.332s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.818625] env[62133]: INFO nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12] During sync_power_state the instance has a pending task (spawning). Skip. 
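The lock messages in this stretch all follow the same oslo.concurrency shape: "Acquiring lock ...", then "acquired ... :: waited Ns" once the lock is obtained, then '"released" ... :: held Ns' afterwards, so long waits (9.332s on "b0cf64fa-..." just above, 9.248s on "compute_resources" earlier) point at contention on that lock name rather than slow work inside it. Below is a hedged sketch of a wrapper that produces the same waited/held bookkeeping; it uses plain threading locks as a stand-in for lockutils and is not the oslo.concurrency implementation.

    import threading
    import time
    from contextlib import contextmanager

    _locks: dict[str, threading.Lock] = {}


    @contextmanager
    def timed_lock(name: str, holder: str):
        """Acquire a named lock, reporting wait and hold durations."""
        lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{holder}"')
        start = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired by "{holder}" :: '
              f'waited {time.monotonic() - start:.3f}s')
        held_start = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{holder}" :: '
                  f'held {time.monotonic() - held_start:.3f}s')


    # Example mirroring the resource tracker usage seen in the log.
    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.01)  # stand-in for claiming resources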
[ 541.818625] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Lock "b0cf64fa-a7f8-40f5-af5e-c10c01dd4a12" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 541.888810] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': task-835142, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064432} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 541.889750] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Extended root virtual disk {{(pid=62133) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 541.890694] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ce1896-d725-42fc-b378-c89213a8f572 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.914296] env[62133]: DEBUG nova.virt.vmwareapi.volumeops [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 3410b0c4-da88-4146-bb50-9b5a9a5d0d87/3410b0c4-da88-4146-bb50-9b5a9a5d0d87.vmdk or device None with type sparse {{(pid=62133) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 541.914968] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6cf9883-0375-485d-a84f-7dde66688ba1 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.937055] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Waiting for the task: (returnval){ [ 541.937055] env[62133]: value = "task-835143" [ 541.937055] env[62133]: _type = "Task" [ 541.937055] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 541.945410] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': task-835143, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.086062] env[62133]: ERROR nova.scheduler.client.report [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [req-b051d55d-e635-4e88-83a2-d7a58dd9624f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2bb88767-ae4a-4310-888a-696a7895e29b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b051d55d-e635-4e88-83a2-d7a58dd9624f"}]} [ 542.116922] env[62133]: DEBUG nova.scheduler.client.report [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Refreshing inventories for resource provider 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 542.147319] env[62133]: DEBUG nova.scheduler.client.report [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Updating ProviderTree inventory for provider 2bb88767-ae4a-4310-888a-696a7895e29b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 542.147507] env[62133]: DEBUG nova.compute.provider_tree [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 542.170891] env[62133]: DEBUG nova.scheduler.client.report [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Refreshing aggregate associations for resource provider 2bb88767-ae4a-4310-888a-696a7895e29b, aggregates: None {{(pid=62133) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 542.199206] env[62133]: DEBUG nova.scheduler.client.report [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 
tempest-ServerActionsTestOtherB-1059985462-project-member] Refreshing trait associations for resource provider 2bb88767-ae4a-4310-888a-696a7895e29b, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62133) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 542.216659] env[62133]: DEBUG nova.network.neutron [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Successfully created port: 713659db-b702-4ebc-9dfa-675daceee5eb {{(pid=62133) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 542.235201] env[62133]: DEBUG nova.compute.manager [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Start spawning the instance on the hypervisor. {{(pid=62133) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 542.242082] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f5293d3e-f006-4072-8533-7751ad7d3616 tempest-ServerDiskConfigTestJSON-42107633 tempest-ServerDiskConfigTestJSON-42107633-project-member] Acquiring lock "70edd0f9-b499-4868-99a8-c62c92a15292" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.242359] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f5293d3e-f006-4072-8533-7751ad7d3616 tempest-ServerDiskConfigTestJSON-42107633 tempest-ServerDiskConfigTestJSON-42107633-project-member] Lock "70edd0f9-b499-4868-99a8-c62c92a15292" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.262347] env[62133]: DEBUG nova.virt.hardware [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:11:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1127011745',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1172699097',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 542.266611] env[62133]: DEBUG nova.virt.hardware [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Flavor limits 0:0:0 {{(pid=62133) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 542.266611] env[62133]: DEBUG nova.virt.hardware [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 542.266611] env[62133]: DEBUG nova.virt.hardware [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 542.266611] env[62133]: DEBUG nova.virt.hardware [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 542.266611] env[62133]: DEBUG nova.virt.hardware [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 542.266817] env[62133]: DEBUG nova.virt.hardware [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 542.266930] env[62133]: DEBUG nova.virt.hardware [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 542.267127] env[62133]: DEBUG nova.virt.hardware [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 542.267235] env[62133]: DEBUG nova.virt.hardware [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 542.267407] env[62133]: DEBUG nova.virt.hardware [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 542.268300] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e32df5-dbd0-417c-b935-58e0d55a09f8 {{(pid=62133) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.279777] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e28eab2-ab87-4271-87c0-05efeeb9277e {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.322527] env[62133]: DEBUG nova.compute.manager [None req-b0ab7f0a-9616-4d7a-ba1f-d1c49041e791 tempest-InstanceActionsTestJSON-1669720906 tempest-InstanceActionsTestJSON-1669720906-project-member] [instance: f62a3436-a4a7-42a2-8023-913319df1d47] Starting instance... {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 542.449918] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': task-835143, 'name': ReconfigVM_Task, 'duration_secs': 0.290485} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.451089] env[62133]: DEBUG nova.virt.vmwareapi.volumeops [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 3410b0c4-da88-4146-bb50-9b5a9a5d0d87/3410b0c4-da88-4146-bb50-9b5a9a5d0d87.vmdk or device None with type sparse {{(pid=62133) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 542.452220] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2cf7ab-c430-4e89-920c-a1d1abe146f3 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.454983] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-af4f5e35-899a-42f1-8fb7-44b4068b2082 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.461244] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79066329-3c26-4926-994c-156850c96793 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.465577] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Waiting for the task: (returnval){ [ 542.465577] env[62133]: value = "task-835144" [ 542.465577] env[62133]: _type = "Task" [ 542.465577] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.497223] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3409544f-0712-4515-8293-2b1f606d5e3b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.503122] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': task-835144, 'name': Rename_Task} progress is 14%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 542.508095] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4a2ffe-104c-4c06-b57b-9efb7346ea66 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.522175] env[62133]: DEBUG nova.compute.provider_tree [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 542.846310] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b0ab7f0a-9616-4d7a-ba1f-d1c49041e791 tempest-InstanceActionsTestJSON-1669720906 tempest-InstanceActionsTestJSON-1669720906-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.981086] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': task-835144, 'name': Rename_Task, 'duration_secs': 0.130518} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 542.981086] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Powering on the VM {{(pid=62133) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 542.981086] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc740856-560f-4615-817e-dca255f204cd {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.988933] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Waiting for the task: (returnval){ [ 542.988933] env[62133]: value = "task-835145" [ 542.988933] env[62133]: _type = "Task" [ 542.988933] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 542.998684] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': task-835145, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 543.099612] env[62133]: DEBUG nova.scheduler.client.report [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Updated inventory for provider 2bb88767-ae4a-4310-888a-696a7895e29b with generation 20 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 543.100974] env[62133]: DEBUG nova.compute.provider_tree [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Updating resource provider 2bb88767-ae4a-4310-888a-696a7895e29b generation from 20 to 21 during operation: update_inventory {{(pid=62133) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 543.100974] env[62133]: DEBUG nova.compute.provider_tree [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 543.503054] env[62133]: DEBUG oslo_vmware.api [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Task: {'id': task-835145, 'name': PowerOnVM_Task, 'duration_secs': 0.502736} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 543.503370] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Powered on the VM {{(pid=62133) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 543.504368] env[62133]: INFO nova.compute.manager [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Took 5.41 seconds to spawn the instance on the hypervisor. 
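[editor's note] The inventory payload logged just above for provider 2bb88767-ae4a-4310-888a-696a7895e29b translates into schedulable capacity via the usual Placement rule, capacity = (total - reserved) * allocation_ratio, with max_unit acting as a per-instance cap. The short sketch below only illustrates that arithmetic on the exact numbers from the log; the helper name is hypothetical and this is not Nova or Placement code.

    # Minimal sketch: derive schedulable capacity from the inventory logged above,
    # assuming the standard Placement rule (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 61,    'allocation_ratio': 1.0},
    }

    def schedulable_capacity(inv):  # hypothetical helper, for illustration only
        return {
            rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()
        }

    # Yields {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}; any single instance
    # is still limited by max_unit per class (16 vCPUs, 65530 MB, 61 GB here).
    print(schedulable_capacity(inventory))
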
[ 543.504368] env[62133]: DEBUG nova.compute.manager [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Checking state {{(pid=62133) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 543.504538] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a71a42b-e9a7-4550-9e02-1fdaec7cab87 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.607462] env[62133]: DEBUG oslo_concurrency.lockutils [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.411s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 543.609340] env[62133]: DEBUG nova.compute.manager [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Start building networks asynchronously for instance. {{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 543.613210] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.996s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 543.995280] env[62133]: DEBUG oslo_concurrency.lockutils [None req-569bd7d0-9a0f-4b5e-aaf2-7b1c2cc9e6c4 tempest-ImagesOneServerTestJSON-1256341287 tempest-ImagesOneServerTestJSON-1256341287-project-member] Acquiring lock "e9f6cbc1-b11a-4361-aa3b-ec5eef3d703e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 543.995280] env[62133]: DEBUG oslo_concurrency.lockutils [None req-569bd7d0-9a0f-4b5e-aaf2-7b1c2cc9e6c4 tempest-ImagesOneServerTestJSON-1256341287 tempest-ImagesOneServerTestJSON-1256341287-project-member] Lock "e9f6cbc1-b11a-4361-aa3b-ec5eef3d703e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.030634] env[62133]: INFO nova.compute.manager [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Took 17.91 seconds to build instance. 
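[editor's note] The lockutils lines above record how long each caller waited for and then held the shared "compute_resources" lock (for example, instance_claim held it 3.411s while abort_instance_claim waited 11.996s). Below is a simplified timing wrapper in the same spirit; it is not oslo.concurrency's implementation, and the names are illustrative.

    # Illustrative only (not oslo_concurrency.lockutils): time how long a caller
    # waits for and holds a named lock, mirroring the "waited X / held Y" messages.
    import threading
    import time
    from contextlib import contextmanager

    _locks = {}

    @contextmanager
    def timed_lock(name, caller):
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        with lock:
            waited = time.monotonic() - t0
            print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
            t1 = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - t1
                print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

    # Usage sketch:
    # with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    #     claim_resources()
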
[ 544.120135] env[62133]: DEBUG nova.compute.utils [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Using /dev/sd instead of None {{(pid=62133) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 544.127788] env[62133]: DEBUG nova.compute.manager [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Allocating IP information in the background. {{(pid=62133) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 544.127788] env[62133]: DEBUG nova.network.neutron [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] allocate_for_instance() {{(pid=62133) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 544.197850] env[62133]: DEBUG nova.policy [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b72e3ac23f684b6cab3cebf17b04d5e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e0b45c111e114b2798640514bc58a9bd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62133) authorize /opt/stack/nova/nova/policy.py:203}} [ 544.243240] env[62133]: INFO nova.compute.manager [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Rebuilding instance [ 544.280718] env[62133]: DEBUG nova.compute.manager [req-40d668ad-efce-4bd9-b3d5-92900474ff41 req-14a1d7a2-46bc-49fc-9d7f-22341351f43a service nova] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Received event network-changed-aa3074f9-6c4f-46e2-8201-f9f0bdff1699 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 544.281190] env[62133]: DEBUG nova.compute.manager [req-40d668ad-efce-4bd9-b3d5-92900474ff41 req-14a1d7a2-46bc-49fc-9d7f-22341351f43a service nova] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Refreshing instance network info cache due to event network-changed-aa3074f9-6c4f-46e2-8201-f9f0bdff1699. 
{{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 544.281498] env[62133]: DEBUG oslo_concurrency.lockutils [req-40d668ad-efce-4bd9-b3d5-92900474ff41 req-14a1d7a2-46bc-49fc-9d7f-22341351f43a service nova] Acquiring lock "refresh_cache-f774f4eb-7329-4449-8567-b4c0dc8fd78a" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.281641] env[62133]: DEBUG oslo_concurrency.lockutils [req-40d668ad-efce-4bd9-b3d5-92900474ff41 req-14a1d7a2-46bc-49fc-9d7f-22341351f43a service nova] Acquired lock "refresh_cache-f774f4eb-7329-4449-8567-b4c0dc8fd78a" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.283027] env[62133]: DEBUG nova.network.neutron [req-40d668ad-efce-4bd9-b3d5-92900474ff41 req-14a1d7a2-46bc-49fc-9d7f-22341351f43a service nova] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Refreshing network info cache for port aa3074f9-6c4f-46e2-8201-f9f0bdff1699 {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 544.313939] env[62133]: DEBUG nova.compute.manager [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Checking state {{(pid=62133) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 544.314787] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c9e40c-3670-4782-a0bb-bbbc0104a3e3 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.393654] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b89cf42-8ad1-4330-8ddf-75c8aac70495 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.401344] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fddac5d-95c9-4ede-b4f9-c99f51ace5c8 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.434768] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61f491b-0219-40b7-ad6c-3e60c52832e8 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.442269] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bcf43cf-fe9a-471b-8da7-cefe76dc3919 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.456083] env[62133]: DEBUG nova.compute.provider_tree [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 544.531463] env[62133]: DEBUG oslo_concurrency.lockutils [None req-446350b7-afa6-4a3c-813b-3be7c5bc283c tempest-ServerDiagnosticsV248Test-1326051369 tempest-ServerDiagnosticsV248Test-1326051369-project-member] Lock "3410b0c4-da88-4146-bb50-9b5a9a5d0d87" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
19.420s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.628863] env[62133]: DEBUG nova.compute.manager [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Start building block device mappings for instance. {{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 544.825536] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Powering off the VM {{(pid=62133) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 544.825829] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bba7fb8c-2be3-499e-a541-087aef7d2ad6 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.829678] env[62133]: DEBUG nova.network.neutron [req-40d668ad-efce-4bd9-b3d5-92900474ff41 req-14a1d7a2-46bc-49fc-9d7f-22341351f43a service nova] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 544.833812] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Waiting for the task: (returnval){ [ 544.833812] env[62133]: value = "task-835146" [ 544.833812] env[62133]: _type = "Task" [ 544.833812] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 544.843901] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835146, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 544.959693] env[62133]: DEBUG nova.scheduler.client.report [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 544.997935] env[62133]: DEBUG nova.network.neutron [req-40d668ad-efce-4bd9-b3d5-92900474ff41 req-14a1d7a2-46bc-49fc-9d7f-22341351f43a service nova] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.035277] env[62133]: DEBUG nova.compute.manager [None req-f5293d3e-f006-4072-8533-7751ad7d3616 tempest-ServerDiskConfigTestJSON-42107633 tempest-ServerDiskConfigTestJSON-42107633-project-member] [instance: 70edd0f9-b499-4868-99a8-c62c92a15292] Starting instance... {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 545.189955] env[62133]: DEBUG nova.network.neutron [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Successfully created port: f99571ee-62f3-44d4-9b3f-40a5e71946e4 {{(pid=62133) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 545.344980] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835146, 'name': PowerOffVM_Task, 'duration_secs': 0.113745} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 545.345420] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Powered off the VM {{(pid=62133) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 545.347125] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Destroying instance {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 545.348322] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4627d01-b769-4b98-af06-0be041857e0a {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.361848] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Unregistering the VM {{(pid=62133) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 545.362968] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6817adf-12c6-4639-92a7-f0b362b2c0c4 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.387885] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Unregistered the VM {{(pid=62133) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 545.388232] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Deleting contents of the VM from datastore datastore1 {{(pid=62133) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 545.388314] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Deleting the datastore file [datastore1] 6174840e-10a6-43ed-be9c-76602f91bf42 {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 545.389316] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d88917b4-161d-4982-a039-7b0ff0c51c38 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.395557] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Waiting for the task: (returnval){ [ 545.395557] env[62133]: value = "task-835148" [ 545.395557] env[62133]: _type = "Task" [ 545.395557] env[62133]: } to complete. 
{{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 545.406518] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 545.446194] env[62133]: ERROR nova.compute.manager [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 713659db-b702-4ebc-9dfa-675daceee5eb, please check neutron logs for more information. [ 545.446194] env[62133]: ERROR nova.compute.manager Traceback (most recent call last): [ 545.446194] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.446194] env[62133]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 545.446194] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 545.446194] env[62133]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 545.446194] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 545.446194] env[62133]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 545.446194] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.446194] env[62133]: ERROR nova.compute.manager self.force_reraise() [ 545.446194] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.446194] env[62133]: ERROR nova.compute.manager raise self.value [ 545.446194] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 545.446194] env[62133]: ERROR nova.compute.manager updated_port = self._update_port( [ 545.446194] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.446194] env[62133]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 545.446905] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.446905] env[62133]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 545.446905] env[62133]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 713659db-b702-4ebc-9dfa-675daceee5eb, please check neutron logs for more information. 
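[editor's note] The traceback above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294) raising PortBindingFailed for port 713659db-b702-4ebc-9dfa-675daceee5eb. A minimal stand-in for that check is sketched below, assuming the failure is signalled by the port's binding:vif_type being "binding_failed"; the exception class here is a local stand-in, not nova.exception.

    # Simplified stand-in for the check seen at the bottom of the traceback above.
    # Assumption: Neutron marks a failed binding by setting binding:vif_type to
    # "binding_failed". Local exception class, not nova.exception.PortBindingFailed.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # e.g. ensure_no_port_binding_failure(
    #     {'id': '713659db-b702-4ebc-9dfa-675daceee5eb',
    #      'binding:vif_type': 'binding_failed'})   # raises PortBindingFailed
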
[ 545.446905] env[62133]: ERROR nova.compute.manager [ 545.446905] env[62133]: Traceback (most recent call last): [ 545.446905] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 545.446905] env[62133]: listener.cb(fileno) [ 545.446905] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 545.446905] env[62133]: result = function(*args, **kwargs) [ 545.446905] env[62133]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 545.446905] env[62133]: return func(*args, **kwargs) [ 545.446905] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 545.446905] env[62133]: raise e [ 545.446905] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.446905] env[62133]: nwinfo = self.network_api.allocate_for_instance( [ 545.446905] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 545.446905] env[62133]: created_port_ids = self._update_ports_for_instance( [ 545.446905] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 545.446905] env[62133]: with excutils.save_and_reraise_exception(): [ 545.446905] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.446905] env[62133]: self.force_reraise() [ 545.446905] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.446905] env[62133]: raise self.value [ 545.446905] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 545.446905] env[62133]: updated_port = self._update_port( [ 545.446905] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.446905] env[62133]: _ensure_no_port_binding_failure(port) [ 545.446905] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.446905] env[62133]: raise exception.PortBindingFailed(port_id=port['id']) [ 545.447973] env[62133]: nova.exception.PortBindingFailed: Binding failed for port 713659db-b702-4ebc-9dfa-675daceee5eb, please check neutron logs for more information. [ 545.447973] env[62133]: Removing descriptor: 15 [ 545.447973] env[62133]: ERROR nova.compute.manager [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 713659db-b702-4ebc-9dfa-675daceee5eb, please check neutron logs for more information. 
[ 545.447973] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Traceback (most recent call last): [ 545.447973] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 545.447973] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] yield resources [ 545.447973] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 545.447973] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] self.driver.spawn(context, instance, image_meta, [ 545.447973] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 545.447973] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 545.447973] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 545.447973] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] vm_ref = self.build_virtual_machine(instance, [ 545.448434] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 545.448434] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] vif_infos = vmwarevif.get_vif_info(self._session, [ 545.448434] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 545.448434] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] for vif in network_info: [ 545.448434] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 545.448434] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] return self._sync_wrapper(fn, *args, **kwargs) [ 545.448434] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 545.448434] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] self.wait() [ 545.448434] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 545.448434] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] self[:] = self._gt.wait() [ 545.448434] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 545.448434] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] return self._exit_event.wait() [ 545.448434] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 545.449281] env[62133]: ERROR 
nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] result = hub.switch() [ 545.449281] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 545.449281] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] return self.greenlet.switch() [ 545.449281] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 545.449281] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] result = function(*args, **kwargs) [ 545.449281] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 545.449281] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] return func(*args, **kwargs) [ 545.449281] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 545.449281] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] raise e [ 545.449281] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.449281] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] nwinfo = self.network_api.allocate_for_instance( [ 545.449281] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 545.449281] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] created_port_ids = self._update_ports_for_instance( [ 545.450110] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 545.450110] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] with excutils.save_and_reraise_exception(): [ 545.450110] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.450110] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] self.force_reraise() [ 545.450110] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.450110] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] raise self.value [ 545.450110] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 545.450110] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] updated_port = self._update_port( [ 545.450110] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.450110] 
env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] _ensure_no_port_binding_failure(port) [ 545.450110] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.450110] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] raise exception.PortBindingFailed(port_id=port['id']) [ 545.450456] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] nova.exception.PortBindingFailed: Binding failed for port 713659db-b702-4ebc-9dfa-675daceee5eb, please check neutron logs for more information. [ 545.450456] env[62133]: ERROR nova.compute.manager [instance: c2c48d98-b791-42c9-8038-d969dda7573e] [ 545.450456] env[62133]: INFO nova.compute.manager [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Terminating instance [ 545.450456] env[62133]: DEBUG oslo_concurrency.lockutils [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Acquiring lock "refresh_cache-c2c48d98-b791-42c9-8038-d969dda7573e" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.450603] env[62133]: DEBUG oslo_concurrency.lockutils [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Acquired lock "refresh_cache-c2c48d98-b791-42c9-8038-d969dda7573e" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.450782] env[62133]: DEBUG nova.network.neutron [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 545.470407] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.857s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.471401] env[62133]: ERROR nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 73123aa0-496f-461c-95b2-f57a8fef14bf, please check neutron logs for more information. 
[ 545.471401] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Traceback (most recent call last): [ 545.471401] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 545.471401] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] self.driver.spawn(context, instance, image_meta, [ 545.471401] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 545.471401] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] self._vmops.spawn(context, instance, image_meta, injected_files, [ 545.471401] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 545.471401] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] vm_ref = self.build_virtual_machine(instance, [ 545.471401] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 545.471401] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] vif_infos = vmwarevif.get_vif_info(self._session, [ 545.471401] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 545.471770] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] for vif in network_info: [ 545.471770] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 545.471770] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] return self._sync_wrapper(fn, *args, **kwargs) [ 545.471770] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 545.471770] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] self.wait() [ 545.471770] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 545.471770] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] self[:] = self._gt.wait() [ 545.471770] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 545.471770] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] return self._exit_event.wait() [ 545.471770] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 545.471770] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] result = hub.switch() [ 545.471770] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
545.471770] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] return self.greenlet.switch() [ 545.472127] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 545.472127] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] result = function(*args, **kwargs) [ 545.472127] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 545.472127] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] return func(*args, **kwargs) [ 545.472127] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 545.472127] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] raise e [ 545.472127] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.472127] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] nwinfo = self.network_api.allocate_for_instance( [ 545.472127] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 545.472127] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] created_port_ids = self._update_ports_for_instance( [ 545.472127] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 545.472127] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] with excutils.save_and_reraise_exception(): [ 545.472127] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.472457] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] self.force_reraise() [ 545.472457] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.472457] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] raise self.value [ 545.472457] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 545.472457] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] updated_port = self._update_port( [ 545.472457] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.472457] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] _ensure_no_port_binding_failure(port) [ 545.472457] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 545.472457] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] raise exception.PortBindingFailed(port_id=port['id']) [ 545.472457] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] nova.exception.PortBindingFailed: Binding failed for port 73123aa0-496f-461c-95b2-f57a8fef14bf, please check neutron logs for more information. [ 545.472457] env[62133]: ERROR nova.compute.manager [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] [ 545.472759] env[62133]: DEBUG nova.compute.utils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Binding failed for port 73123aa0-496f-461c-95b2-f57a8fef14bf, please check neutron logs for more information. {{(pid=62133) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 545.473606] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 12.483s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.473797] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 545.473948] env[62133]: DEBUG nova.compute.resource_tracker [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62133) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 545.474245] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.861s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 545.477850] env[62133]: DEBUG nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Build of instance 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30 was re-scheduled: Binding failed for port 73123aa0-496f-461c-95b2-f57a8fef14bf, please check neutron logs for more information. 
{{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 545.483132] env[62133]: DEBUG nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Unplugging VIFs for instance {{(pid=62133) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 545.483132] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Acquiring lock "refresh_cache-0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.483132] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Acquired lock "refresh_cache-0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.483132] env[62133]: DEBUG nova.network.neutron [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 545.484550] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8279522-1aa7-4375-a240-8a0c30ddbfa2 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.500459] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0ab9b4-9c08-4d9f-a3b1-5963446692f3 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.507038] env[62133]: DEBUG oslo_concurrency.lockutils [req-40d668ad-efce-4bd9-b3d5-92900474ff41 req-14a1d7a2-46bc-49fc-9d7f-22341351f43a service nova] Releasing lock "refresh_cache-f774f4eb-7329-4449-8567-b4c0dc8fd78a" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 545.507038] env[62133]: DEBUG nova.compute.manager [req-40d668ad-efce-4bd9-b3d5-92900474ff41 req-14a1d7a2-46bc-49fc-9d7f-22341351f43a service nova] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Received event network-vif-deleted-aa3074f9-6c4f-46e2-8201-f9f0bdff1699 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 545.517393] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b860db9f-883c-47e7-bbe6-a18b4e51eafa {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.524120] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b0c1eac-31f3-4dc8-9520-be974d04a934 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.559327] env[62133]: DEBUG nova.compute.resource_tracker [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
free_ram=181664MB free_disk=61GB free_vcpus=48 pci_devices=None {{(pid=62133) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 545.559780] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.596755] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f5293d3e-f006-4072-8533-7751ad7d3616 tempest-ServerDiskConfigTestJSON-42107633 tempest-ServerDiskConfigTestJSON-42107633-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.643859] env[62133]: DEBUG nova.compute.manager [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Start spawning the instance on the hypervisor. {{(pid=62133) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 545.677352] env[62133]: DEBUG nova.virt.hardware [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 545.677650] env[62133]: DEBUG nova.virt.hardware [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 545.677816] env[62133]: DEBUG nova.virt.hardware [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 545.677997] env[62133]: DEBUG nova.virt.hardware [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 545.678321] env[62133]: DEBUG nova.virt.hardware [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Image pref 0:0:0 {{(pid=62133) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 545.678504] env[62133]: DEBUG nova.virt.hardware [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 545.678748] env[62133]: DEBUG nova.virt.hardware [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 545.678958] env[62133]: DEBUG nova.virt.hardware [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 545.679208] env[62133]: DEBUG nova.virt.hardware [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 545.679488] env[62133]: DEBUG nova.virt.hardware [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 545.679738] env[62133]: DEBUG nova.virt.hardware [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 545.680696] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fca211a-cd30-40a3-bcb1-b70e46225fe7 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.688885] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a6c259-5f6a-43c6-bc1a-2de826502387 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 545.905852] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098383} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 545.906900] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Deleted the datastore file {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 545.907148] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Deleted contents of the VM from datastore datastore1 {{(pid=62133) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 545.907535] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Instance destroyed {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 545.988351] env[62133]: DEBUG nova.network.neutron [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 546.029470] env[62133]: DEBUG nova.network.neutron [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Instance cache missing network info. 
{{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 546.252478] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90294e6-7c29-42ae-bed2-4d8a02f220af {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.262387] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46f0256-172a-43d9-aa40-c771c70ccfef {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.307720] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d20a03-430f-4f25-9f45-89ec51a4c5ea {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.315451] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b816a52-242c-497f-b65a-c2342e54e6ac {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.332511] env[62133]: DEBUG nova.compute.provider_tree [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 546.346928] env[62133]: DEBUG nova.network.neutron [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.473543] env[62133]: DEBUG nova.network.neutron [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.836726] env[62133]: DEBUG nova.scheduler.client.report [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 546.850284] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Releasing lock "refresh_cache-0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.850413] env[62133]: DEBUG 
nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62133) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 546.851250] env[62133]: DEBUG nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 546.851937] env[62133]: DEBUG nova.network.neutron [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 546.876735] env[62133]: DEBUG nova.network.neutron [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 546.967091] env[62133]: DEBUG nova.virt.hardware [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 546.967325] env[62133]: DEBUG nova.virt.hardware [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 546.967506] env[62133]: DEBUG nova.virt.hardware [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 546.967702] env[62133]: DEBUG nova.virt.hardware [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 546.970019] env[62133]: DEBUG nova.virt.hardware [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd 
tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 546.970019] env[62133]: DEBUG nova.virt.hardware [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 546.970569] env[62133]: DEBUG nova.virt.hardware [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 546.971230] env[62133]: DEBUG nova.virt.hardware [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 546.971434] env[62133]: DEBUG nova.virt.hardware [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 546.971599] env[62133]: DEBUG nova.virt.hardware [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 546.971768] env[62133]: DEBUG nova.virt.hardware [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 546.972692] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9f149c-8485-4ce4-b7dd-ed2ebdebe0d8 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.976188] env[62133]: DEBUG oslo_concurrency.lockutils [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Releasing lock "refresh_cache-c2c48d98-b791-42c9-8038-d969dda7573e" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 546.977508] env[62133]: DEBUG nova.compute.manager [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Start destroying the instance on the hypervisor. 
{{(pid=62133) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 546.977508] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Destroying instance {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 546.977947] env[62133]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-017a930c-6281-4c98-b5b8-d1c0f9fb7319 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.986188] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed9d035a-346c-45b1-9297-7848bf0f6f99 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.993387] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13803204-fe0e-46d2-9242-137112e4cc1e {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.012167] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Instance VIF info [] {{(pid=62133) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 547.017959] env[62133]: DEBUG oslo.service.loopingcall [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 547.018741] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Creating VM on the ESX host {{(pid=62133) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 547.019018] env[62133]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abf3529b-45fa-4d1a-b424-04bfbbcb6523 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.035625] env[62133]: WARNING nova.virt.vmwareapi.vmops [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c2c48d98-b791-42c9-8038-d969dda7573e could not be found. 
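The WARNING just above shows the VMware driver finding no backend VM for instance c2c48d98-b791-42c9-8038-d969dda7573e and the destroy nevertheless being reported as successful in the records that follow: a missing VM is treated as an already-completed destroy. The short sketch below (not part of the captured log, and not Nova's actual implementation) illustrates that pattern under stated assumptions; InstanceNotFound stands in for nova.exception.InstanceNotFound, and find_vm_ref/delete_vm are hypothetical placeholders rather than real Nova APIs.

```python
class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy_best_effort(instance_uuid, find_vm_ref, delete_vm):
    """Destroy a VM, treating a missing backend VM as already destroyed."""
    try:
        vm_ref = find_vm_ref(instance_uuid)   # cf. SearchIndex.FindAllByUuid above
        delete_vm(vm_ref)                     # unregister the VM, delete its datastore files
    except InstanceNotFound:
        # Mirrors the WARNING above: the VM was never created on the hypervisor,
        # so there is nothing to clean up on the vCenter side.
        pass
    # Either way, the compute manager then proceeds to deallocate networking.


if __name__ == "__main__":
    def find_vm_ref(uuid):
        raise InstanceNotFound(f"Instance {uuid} could not be found.")

    destroy_best_effort("c2c48d98-b791-42c9-8038-d969dda7573e",
                        find_vm_ref, delete_vm=lambda ref: None)
```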
[ 547.035870] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Instance destroyed {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 547.036038] env[62133]: INFO nova.compute.manager [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Took 0.06 seconds to destroy the instance on the hypervisor. [ 547.036275] env[62133]: DEBUG oslo.service.loopingcall [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 547.036960] env[62133]: DEBUG nova.compute.manager [-] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 547.037094] env[62133]: DEBUG nova.network.neutron [-] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 547.041716] env[62133]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 547.041716] env[62133]: value = "task-835149" [ 547.041716] env[62133]: _type = "Task" [ 547.041716] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.050664] env[62133]: DEBUG oslo_vmware.api [-] Task: {'id': task-835149, 'name': CreateVM_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.064363] env[62133]: DEBUG nova.network.neutron [-] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.186838] env[62133]: ERROR nova.compute.manager [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f99571ee-62f3-44d4-9b3f-40a5e71946e4, please check neutron logs for more information. 
[ 547.186838] env[62133]: ERROR nova.compute.manager Traceback (most recent call last): [ 547.186838] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 547.186838] env[62133]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 547.186838] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 547.186838] env[62133]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 547.186838] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 547.186838] env[62133]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 547.186838] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.186838] env[62133]: ERROR nova.compute.manager self.force_reraise() [ 547.186838] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.186838] env[62133]: ERROR nova.compute.manager raise self.value [ 547.186838] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 547.186838] env[62133]: ERROR nova.compute.manager updated_port = self._update_port( [ 547.186838] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.186838] env[62133]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 547.187560] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.187560] env[62133]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 547.187560] env[62133]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f99571ee-62f3-44d4-9b3f-40a5e71946e4, please check neutron logs for more information. 
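The traceback above terminates in nova/network/neutron.py's _ensure_no_port_binding_failure() raising PortBindingFailed for port f99571ee-62f3-44d4-9b3f-40a5e71946e4. The sketch below (not log output, and not a copy of the Nova source) is a self-contained approximation of that check, assuming Neutron reports a failed binding by setting binding:vif_type to 'binding_failed' on the port; the exception class here is a stand-in for nova.exception.PortBindingFailed.

```python
class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port: dict) -> None:
    # Assumption: Neutron marks a port it could not bind with
    # binding:vif_type == 'binding_failed'. Nova turns that into a hard error,
    # so the spawn aborts and the build is rescheduled or the instance errors out.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


if __name__ == "__main__":
    try:
        ensure_no_port_binding_failure(
            {"id": "f99571ee-62f3-44d4-9b3f-40a5e71946e4",
             "binding:vif_type": "binding_failed"})
    except PortBindingFailed as exc:
        print(exc)   # same message format as the ERROR records above
```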
[ 547.187560] env[62133]: ERROR nova.compute.manager [ 547.187560] env[62133]: Traceback (most recent call last): [ 547.187560] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 547.187560] env[62133]: listener.cb(fileno) [ 547.187560] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.187560] env[62133]: result = function(*args, **kwargs) [ 547.187560] env[62133]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 547.187560] env[62133]: return func(*args, **kwargs) [ 547.187560] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 547.187560] env[62133]: raise e [ 547.187560] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 547.187560] env[62133]: nwinfo = self.network_api.allocate_for_instance( [ 547.187560] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 547.187560] env[62133]: created_port_ids = self._update_ports_for_instance( [ 547.187560] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 547.187560] env[62133]: with excutils.save_and_reraise_exception(): [ 547.187560] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.187560] env[62133]: self.force_reraise() [ 547.187560] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.187560] env[62133]: raise self.value [ 547.187560] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 547.187560] env[62133]: updated_port = self._update_port( [ 547.187560] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.187560] env[62133]: _ensure_no_port_binding_failure(port) [ 547.187560] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.187560] env[62133]: raise exception.PortBindingFailed(port_id=port['id']) [ 547.188772] env[62133]: nova.exception.PortBindingFailed: Binding failed for port f99571ee-62f3-44d4-9b3f-40a5e71946e4, please check neutron logs for more information. [ 547.188772] env[62133]: Removing descriptor: 16 [ 547.188772] env[62133]: ERROR nova.compute.manager [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f99571ee-62f3-44d4-9b3f-40a5e71946e4, please check neutron logs for more information. 
[ 547.188772] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Traceback (most recent call last): [ 547.188772] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 547.188772] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] yield resources [ 547.188772] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 547.188772] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] self.driver.spawn(context, instance, image_meta, [ 547.188772] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 547.188772] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 547.188772] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 547.188772] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] vm_ref = self.build_virtual_machine(instance, [ 547.189152] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 547.189152] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] vif_infos = vmwarevif.get_vif_info(self._session, [ 547.189152] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 547.189152] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] for vif in network_info: [ 547.189152] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 547.189152] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] return self._sync_wrapper(fn, *args, **kwargs) [ 547.189152] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 547.189152] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] self.wait() [ 547.189152] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 547.189152] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] self[:] = self._gt.wait() [ 547.189152] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 547.189152] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] return self._exit_event.wait() [ 547.189152] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 547.189558] env[62133]: ERROR 
nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] result = hub.switch() [ 547.189558] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 547.189558] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] return self.greenlet.switch() [ 547.189558] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.189558] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] result = function(*args, **kwargs) [ 547.189558] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 547.189558] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] return func(*args, **kwargs) [ 547.189558] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 547.189558] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] raise e [ 547.189558] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 547.189558] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] nwinfo = self.network_api.allocate_for_instance( [ 547.189558] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 547.189558] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] created_port_ids = self._update_ports_for_instance( [ 547.189974] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 547.189974] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] with excutils.save_and_reraise_exception(): [ 547.189974] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.189974] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] self.force_reraise() [ 547.189974] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.189974] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] raise self.value [ 547.189974] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 547.189974] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] updated_port = self._update_port( [ 547.189974] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.189974] 
env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] _ensure_no_port_binding_failure(port) [ 547.189974] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.189974] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] raise exception.PortBindingFailed(port_id=port['id']) [ 547.190310] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] nova.exception.PortBindingFailed: Binding failed for port f99571ee-62f3-44d4-9b3f-40a5e71946e4, please check neutron logs for more information. [ 547.190310] env[62133]: ERROR nova.compute.manager [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] [ 547.190310] env[62133]: INFO nova.compute.manager [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Terminating instance [ 547.190310] env[62133]: DEBUG oslo_concurrency.lockutils [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Acquiring lock "refresh_cache-584b1af2-6903-4a08-8ed2-9cd21075f0de" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.190310] env[62133]: DEBUG oslo_concurrency.lockutils [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Acquired lock "refresh_cache-584b1af2-6903-4a08-8ed2-9cd21075f0de" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.190310] env[62133]: DEBUG nova.network.neutron [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 547.244754] env[62133]: DEBUG oslo_concurrency.lockutils [None req-efe99f13-fc44-4e4e-b991-ddeac4493bc3 tempest-ServersTestMultiNic-119293823 tempest-ServersTestMultiNic-119293823-project-member] Acquiring lock "bebe84f3-f56d-4a81-9001-a041df442b56" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 547.245017] env[62133]: DEBUG oslo_concurrency.lockutils [None req-efe99f13-fc44-4e4e-b991-ddeac4493bc3 tempest-ServersTestMultiNic-119293823 tempest-ServersTestMultiNic-119293823-project-member] Lock "bebe84f3-f56d-4a81-9001-a041df442b56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.348000] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.874s {{(pid=62133) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.349478] env[62133]: ERROR nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a38e9eb9-5c90-4a44-9cf0-1f5f93036483, please check neutron logs for more information. [ 547.349478] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Traceback (most recent call last): [ 547.349478] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 547.349478] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] self.driver.spawn(context, instance, image_meta, [ 547.349478] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 547.349478] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 547.349478] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 547.349478] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] vm_ref = self.build_virtual_machine(instance, [ 547.349478] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 547.349478] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] vif_infos = vmwarevif.get_vif_info(self._session, [ 547.349478] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 547.349789] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] for vif in network_info: [ 547.349789] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 547.349789] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] return self._sync_wrapper(fn, *args, **kwargs) [ 547.349789] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 547.349789] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] self.wait() [ 547.349789] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 547.349789] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] self[:] = self._gt.wait() [ 547.349789] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 547.349789] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] return self._exit_event.wait() [ 
547.349789] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 547.349789] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] result = hub.switch() [ 547.349789] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 547.349789] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] return self.greenlet.switch() [ 547.350135] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 547.350135] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] result = function(*args, **kwargs) [ 547.350135] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 547.350135] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] return func(*args, **kwargs) [ 547.350135] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 547.350135] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] raise e [ 547.350135] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 547.350135] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] nwinfo = self.network_api.allocate_for_instance( [ 547.350135] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 547.350135] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] created_port_ids = self._update_ports_for_instance( [ 547.350135] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 547.350135] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] with excutils.save_and_reraise_exception(): [ 547.350135] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 547.350483] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] self.force_reraise() [ 547.350483] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 547.350483] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] raise self.value [ 547.350483] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 547.350483] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] 
updated_port = self._update_port( [ 547.350483] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 547.350483] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] _ensure_no_port_binding_failure(port) [ 547.350483] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 547.350483] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] raise exception.PortBindingFailed(port_id=port['id']) [ 547.350483] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] nova.exception.PortBindingFailed: Binding failed for port a38e9eb9-5c90-4a44-9cf0-1f5f93036483, please check neutron logs for more information. [ 547.350483] env[62133]: ERROR nova.compute.manager [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] [ 547.351226] env[62133]: DEBUG nova.compute.utils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Binding failed for port a38e9eb9-5c90-4a44-9cf0-1f5f93036483, please check neutron logs for more information. {{(pid=62133) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 547.352483] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 11.406s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.352673] env[62133]: DEBUG nova.objects.instance [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62133) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 547.356340] env[62133]: DEBUG nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Build of instance bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b was re-scheduled: Binding failed for port a38e9eb9-5c90-4a44-9cf0-1f5f93036483, please check neutron logs for more information. 
{{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 547.357065] env[62133]: DEBUG nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Unplugging VIFs for instance {{(pid=62133) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 547.357290] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Acquiring lock "refresh_cache-bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.357633] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Acquired lock "refresh_cache-bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.357633] env[62133]: DEBUG nova.network.neutron [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 547.380921] env[62133]: DEBUG nova.network.neutron [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.405053] env[62133]: DEBUG nova.compute.manager [None req-bf72faf6-1f63-4ac9-a0a2-68fce0b51fa0 tempest-ServerDiagnosticsV248Test-1763132665 tempest-ServerDiagnosticsV248Test-1763132665-project-admin] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Checking state {{(pid=62133) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 547.405053] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cde9ec2-91d9-4dc4-9cb6-93c6ae15b018 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.411069] env[62133]: INFO nova.compute.manager [None req-bf72faf6-1f63-4ac9-a0a2-68fce0b51fa0 tempest-ServerDiagnosticsV248Test-1763132665 tempest-ServerDiagnosticsV248Test-1763132665-project-admin] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Retrieving diagnostics [ 547.411069] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523e18ea-73e2-42ca-8b05-f28b48295e58 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.553585] env[62133]: DEBUG oslo_vmware.api [-] Task: {'id': task-835149, 'name': CreateVM_Task, 'duration_secs': 0.485501} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 547.553585] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Created VM on the ESX host {{(pid=62133) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 547.553585] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 547.553585] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.553585] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 547.553585] env[62133]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60e15b87-43b9-4c7d-88c0-94fb9008c1e9 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.558158] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Waiting for the task: (returnval){ [ 547.558158] env[62133]: value = "session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]521e4fad-2f7d-2b0d-24da-dd29429eb19c" [ 547.558158] env[62133]: _type = "Task" [ 547.558158] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 547.566381] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]521e4fad-2f7d-2b0d-24da-dd29429eb19c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 547.567786] env[62133]: DEBUG nova.network.neutron [-] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.714971] env[62133]: DEBUG nova.network.neutron [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Instance cache missing network info. 
{{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.843959] env[62133]: DEBUG nova.network.neutron [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 547.882114] env[62133]: DEBUG nova.network.neutron [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.884538] env[62133]: INFO nova.compute.manager [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] Took 1.03 seconds to deallocate network for instance. [ 548.008596] env[62133]: DEBUG nova.network.neutron [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.048179] env[62133]: DEBUG nova.compute.manager [req-65fd324d-bc29-416c-acdc-b9bd10cb2e08 req-d3e87f6a-8660-4bbf-9be0-5edee80e9011 service nova] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Received event network-changed-713659db-b702-4ebc-9dfa-675daceee5eb {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 548.048179] env[62133]: DEBUG nova.compute.manager [req-65fd324d-bc29-416c-acdc-b9bd10cb2e08 req-d3e87f6a-8660-4bbf-9be0-5edee80e9011 service nova] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Refreshing instance network info cache due to event network-changed-713659db-b702-4ebc-9dfa-675daceee5eb. 
{{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 548.048179] env[62133]: DEBUG oslo_concurrency.lockutils [req-65fd324d-bc29-416c-acdc-b9bd10cb2e08 req-d3e87f6a-8660-4bbf-9be0-5edee80e9011 service nova] Acquiring lock "refresh_cache-c2c48d98-b791-42c9-8038-d969dda7573e" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.048179] env[62133]: DEBUG oslo_concurrency.lockutils [req-65fd324d-bc29-416c-acdc-b9bd10cb2e08 req-d3e87f6a-8660-4bbf-9be0-5edee80e9011 service nova] Acquired lock "refresh_cache-c2c48d98-b791-42c9-8038-d969dda7573e" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.048179] env[62133]: DEBUG nova.network.neutron [req-65fd324d-bc29-416c-acdc-b9bd10cb2e08 req-d3e87f6a-8660-4bbf-9be0-5edee80e9011 service nova] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Refreshing network info cache for port 713659db-b702-4ebc-9dfa-675daceee5eb {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 548.072022] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]521e4fad-2f7d-2b0d-24da-dd29429eb19c, 'name': SearchDatastore_Task, 'duration_secs': 0.022799} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.072022] env[62133]: INFO nova.compute.manager [-] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Took 1.03 seconds to deallocate network for instance. [ 548.072022] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.072022] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Processing image 0d8cd420-ac77-4102-b0d0-bf339970e2ce {{(pid=62133) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 548.072022] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.072663] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.072663] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 
tempest-ServersAdmin275Test-1588497836-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62133) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 548.074258] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c24b1cf-ff44-41dd-a901-c956aa0f0847 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.077036] env[62133]: DEBUG nova.compute.claims [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Aborting claim: {{(pid=62133) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 548.077352] env[62133]: DEBUG oslo_concurrency.lockutils [None req-38427b21-f27d-4ab3-b466-c826eaf09a70 tempest-ServersWithSpecificFlavorTestJSON-1599827642 tempest-ServersWithSpecificFlavorTestJSON-1599827642-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.087019] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62133) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 548.087019] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=62133) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 548.087019] env[62133]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd67d79c-d00d-4b5a-b9a9-a79c85a6fdab {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.097761] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Waiting for the task: (returnval){ [ 548.097761] env[62133]: value = "session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52b2c2d5-0a21-6440-d279-7469e3de6f5c" [ 548.097761] env[62133]: _type = "Task" [ 548.097761] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.109432] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52b2c2d5-0a21-6440-d279-7469e3de6f5c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.261814] env[62133]: DEBUG oslo_concurrency.lockutils [None req-95dece08-0425-4157-b674-9055122a70d5 tempest-ServersAdminTestJSON-1897932561 tempest-ServersAdminTestJSON-1897932561-project-member] Acquiring lock "d5180e57-746a-4c5b-a00a-438eaa7ecf7e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.262097] env[62133]: DEBUG oslo_concurrency.lockutils [None req-95dece08-0425-4157-b674-9055122a70d5 tempest-ServersAdminTestJSON-1897932561 tempest-ServersAdminTestJSON-1897932561-project-member] Lock "d5180e57-746a-4c5b-a00a-438eaa7ecf7e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 548.346945] env[62133]: DEBUG oslo_concurrency.lockutils [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Releasing lock "refresh_cache-584b1af2-6903-4a08-8ed2-9cd21075f0de" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.347545] env[62133]: DEBUG nova.compute.manager [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Start destroying the instance on the hypervisor. {{(pid=62133) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 548.347769] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Destroying instance {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 548.348082] env[62133]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d024457b-625c-438b-bb43-173309ce9dbf {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.360330] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d7500f-2f9d-4ab1-a5d0-83e1516804fb {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.378985] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5a4713a4-ad1a-433c-82fb-8d9c5a57ce13 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 548.378985] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 10.451s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
548.390955] env[62133]: WARNING nova.virt.vmwareapi.vmops [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 584b1af2-6903-4a08-8ed2-9cd21075f0de could not be found. [ 548.390955] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Instance destroyed {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 548.390955] env[62133]: INFO nova.compute.manager [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Took 0.04 seconds to destroy the instance on the hypervisor. [ 548.390955] env[62133]: DEBUG oslo.service.loopingcall [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 548.397269] env[62133]: DEBUG nova.compute.manager [-] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 548.397269] env[62133]: DEBUG nova.network.neutron [-] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 548.425145] env[62133]: DEBUG nova.network.neutron [-] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.433175] env[62133]: DEBUG nova.compute.manager [req-9330a89a-8c75-4b1a-b204-9b3c4d8c1901 req-47b42133-ec50-4f10-a32e-df23a3ec1434 service nova] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Received event network-changed-f99571ee-62f3-44d4-9b3f-40a5e71946e4 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 548.433175] env[62133]: DEBUG nova.compute.manager [req-9330a89a-8c75-4b1a-b204-9b3c4d8c1901 req-47b42133-ec50-4f10-a32e-df23a3ec1434 service nova] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Refreshing instance network info cache due to event network-changed-f99571ee-62f3-44d4-9b3f-40a5e71946e4. 
{{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 548.433175] env[62133]: DEBUG oslo_concurrency.lockutils [req-9330a89a-8c75-4b1a-b204-9b3c4d8c1901 req-47b42133-ec50-4f10-a32e-df23a3ec1434 service nova] Acquiring lock "refresh_cache-584b1af2-6903-4a08-8ed2-9cd21075f0de" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.433175] env[62133]: DEBUG oslo_concurrency.lockutils [req-9330a89a-8c75-4b1a-b204-9b3c4d8c1901 req-47b42133-ec50-4f10-a32e-df23a3ec1434 service nova] Acquired lock "refresh_cache-584b1af2-6903-4a08-8ed2-9cd21075f0de" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.433175] env[62133]: DEBUG nova.network.neutron [req-9330a89a-8c75-4b1a-b204-9b3c4d8c1901 req-47b42133-ec50-4f10-a32e-df23a3ec1434 service nova] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Refreshing network info cache for port f99571ee-62f3-44d4-9b3f-40a5e71946e4 {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 548.513912] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Releasing lock "refresh_cache-bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.514163] env[62133]: DEBUG nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62133) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 548.514418] env[62133]: DEBUG nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 548.514615] env[62133]: DEBUG nova.network.neutron [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 548.534925] env[62133]: DEBUG nova.network.neutron [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.576086] env[62133]: DEBUG nova.network.neutron [req-65fd324d-bc29-416c-acdc-b9bd10cb2e08 req-d3e87f6a-8660-4bbf-9be0-5edee80e9011 service nova] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Instance cache missing network info. 
{{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.610549] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52b2c2d5-0a21-6440-d279-7469e3de6f5c, 'name': SearchDatastore_Task, 'duration_secs': 0.009169} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 548.611708] env[62133]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2fc6ab7-7be1-465b-999b-66e35657bf9e {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.617101] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Waiting for the task: (returnval){ [ 548.617101] env[62133]: value = "session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52742b7f-fe6b-358b-00ad-e833c9320ea1" [ 548.617101] env[62133]: _type = "Task" [ 548.617101] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 548.625849] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52742b7f-fe6b-358b-00ad-e833c9320ea1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 548.688116] env[62133]: DEBUG nova.network.neutron [req-65fd324d-bc29-416c-acdc-b9bd10cb2e08 req-d3e87f6a-8660-4bbf-9be0-5edee80e9011 service nova] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.929374] env[62133]: DEBUG nova.network.neutron [-] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.965225] env[62133]: DEBUG nova.network.neutron [req-9330a89a-8c75-4b1a-b204-9b3c4d8c1901 req-47b42133-ec50-4f10-a32e-df23a3ec1434 service nova] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Instance cache missing network info. 
{{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.971262] env[62133]: INFO nova.scheduler.client.report [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Deleted allocations for instance 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30 [ 549.038844] env[62133]: DEBUG nova.network.neutron [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.130129] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': session[527a9f23-e5f0-66dd-9f36-e3d27aa4a338]52742b7f-fe6b-358b-00ad-e833c9320ea1, 'name': SearchDatastore_Task, 'duration_secs': 0.00896} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.130129] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.130129] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk to [datastore2] 6174840e-10a6-43ed-be9c-76602f91bf42/6174840e-10a6-43ed-be9c-76602f91bf42.vmdk {{(pid=62133) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 549.130129] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6bca0ade-7827-460a-a1e8-2f7a99fd394a {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.136482] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Waiting for the task: (returnval){ [ 549.136482] env[62133]: value = "task-835150" [ 549.136482] env[62133]: _type = "Task" [ 549.136482] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.144731] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835150, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.155159] env[62133]: DEBUG nova.network.neutron [req-9330a89a-8c75-4b1a-b204-9b3c4d8c1901 req-47b42133-ec50-4f10-a32e-df23a3ec1434 service nova] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.191375] env[62133]: DEBUG oslo_concurrency.lockutils [req-65fd324d-bc29-416c-acdc-b9bd10cb2e08 req-d3e87f6a-8660-4bbf-9be0-5edee80e9011 service nova] Releasing lock "refresh_cache-c2c48d98-b791-42c9-8038-d969dda7573e" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.191639] env[62133]: DEBUG nova.compute.manager [req-65fd324d-bc29-416c-acdc-b9bd10cb2e08 req-d3e87f6a-8660-4bbf-9be0-5edee80e9011 service nova] [instance: c2c48d98-b791-42c9-8038-d969dda7573e] Received event network-vif-deleted-713659db-b702-4ebc-9dfa-675daceee5eb {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 549.408343] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf0dcb8-2742-485b-bd11-372c2393c8dd {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.417016] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09c1ff2-af77-415a-bc49-f958038e992c {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.454787] env[62133]: INFO nova.compute.manager [-] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Took 1.06 seconds to deallocate network for instance. 
[ 549.457405] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5652e2b-47a4-4a88-9833-cf9dc1ede92b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.460640] env[62133]: DEBUG nova.compute.claims [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Aborting claim: {{(pid=62133) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 549.470339] env[62133]: DEBUG oslo_concurrency.lockutils [None req-149ddd2c-aa0c-47f0-89cd-44dad721b10c tempest-ServerActionsTestOtherB-1059985462 tempest-ServerActionsTestOtherB-1059985462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.470339] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ad2e00-ba59-430d-9290-f014f2aa0d53 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.476567] env[62133]: DEBUG oslo_concurrency.lockutils [None req-43ccb6ed-8212-45cb-a02e-dd48a1e2f3bc tempest-ServerDiagnosticsTest-1644469761 tempest-ServerDiagnosticsTest-1644469761-project-member] Lock "0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.634s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.486339] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Lock "0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 17.001s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.486563] env[62133]: INFO nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: 0e5473a7-4356-4bc9-a1fd-83b0dfae3a30] During sync_power_state the instance has a pending task (spawning). Skip. [ 549.486783] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Lock "0e5473a7-4356-4bc9-a1fd-83b0dfae3a30" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.487257] env[62133]: DEBUG nova.compute.provider_tree [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 549.542312] env[62133]: INFO nova.compute.manager [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] Took 1.03 seconds to deallocate network for instance. 
[ 549.648581] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835150, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489099} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 549.648841] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/0d8cd420-ac77-4102-b0d0-bf339970e2ce/0d8cd420-ac77-4102-b0d0-bf339970e2ce.vmdk to [datastore2] 6174840e-10a6-43ed-be9c-76602f91bf42/6174840e-10a6-43ed-be9c-76602f91bf42.vmdk {{(pid=62133) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1424}} [ 549.649061] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Extending root virtual disk to 1048576 {{(pid=62133) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 549.649308] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93030824-6222-4702-9d23-308167657654 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.655762] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Waiting for the task: (returnval){ [ 549.655762] env[62133]: value = "task-835151" [ 549.655762] env[62133]: _type = "Task" [ 549.655762] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 549.659074] env[62133]: DEBUG oslo_concurrency.lockutils [req-9330a89a-8c75-4b1a-b204-9b3c4d8c1901 req-47b42133-ec50-4f10-a32e-df23a3ec1434 service nova] Releasing lock "refresh_cache-584b1af2-6903-4a08-8ed2-9cd21075f0de" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.663869] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835151, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 549.898405] env[62133]: DEBUG oslo_concurrency.lockutils [None req-00a65733-9938-4725-9263-c9b3de258b68 tempest-ServerActionsTestOtherA-1144559951 tempest-ServerActionsTestOtherA-1144559951-project-member] Acquiring lock "fd069a41-3344-4ab7-9b0d-b5dff9520bc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.898629] env[62133]: DEBUG oslo_concurrency.lockutils [None req-00a65733-9938-4725-9263-c9b3de258b68 tempest-ServerActionsTestOtherA-1144559951 tempest-ServerActionsTestOtherA-1144559951-project-member] Lock "fd069a41-3344-4ab7-9b0d-b5dff9520bc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.898852] env[62133]: DEBUG oslo_concurrency.lockutils [None req-ba6c1502-9d46-4952-8241-8e0b1ec9ab7f tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Acquiring lock "0d4b25da-dfdb-441b-bbc0-fc784669e07c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 549.899059] env[62133]: DEBUG oslo_concurrency.lockutils [None req-ba6c1502-9d46-4952-8241-8e0b1ec9ab7f tempest-DeleteServersAdminTestJSON-72498087 tempest-DeleteServersAdminTestJSON-72498087-project-member] Lock "0d4b25da-dfdb-441b-bbc0-fc784669e07c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.989153] env[62133]: DEBUG nova.compute.manager [None req-569bd7d0-9a0f-4b5e-aaf2-7b1c2cc9e6c4 tempest-ImagesOneServerTestJSON-1256341287 tempest-ImagesOneServerTestJSON-1256341287-project-member] [instance: e9f6cbc1-b11a-4361-aa3b-ec5eef3d703e] Starting instance... {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 549.993702] env[62133]: DEBUG nova.scheduler.client.report [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 550.166023] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835151, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063115} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 550.166731] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Extended root virtual disk {{(pid=62133) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 550.167128] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4099810d-45ab-44e1-8be5-18705085c35a {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.189343] env[62133]: DEBUG nova.virt.vmwareapi.volumeops [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] 6174840e-10a6-43ed-be9c-76602f91bf42/6174840e-10a6-43ed-be9c-76602f91bf42.vmdk or device None with type sparse {{(pid=62133) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 550.189343] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2ddb8f7-3dbc-4952-8487-6c2483c0cccb {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.211779] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Waiting for the task: (returnval){ [ 550.211779] env[62133]: value = "task-835152" [ 550.211779] env[62133]: _type = "Task" [ 550.211779] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 550.221017] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835152, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.501418] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.128s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 550.502539] env[62133]: ERROR nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 34081212-a7c2-42e3-b868-a211575a9557, please check neutron logs for more information. 
[ 550.502539] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] Traceback (most recent call last): [ 550.502539] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 550.502539] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] self.driver.spawn(context, instance, image_meta, [ 550.502539] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 550.502539] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] self._vmops.spawn(context, instance, image_meta, injected_files, [ 550.502539] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 550.502539] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] vm_ref = self.build_virtual_machine(instance, [ 550.502539] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 550.502539] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] vif_infos = vmwarevif.get_vif_info(self._session, [ 550.502539] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 550.502950] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] for vif in network_info: [ 550.502950] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 550.502950] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] return self._sync_wrapper(fn, *args, **kwargs) [ 550.502950] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 550.502950] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] self.wait() [ 550.502950] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 550.502950] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] self[:] = self._gt.wait() [ 550.502950] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 550.502950] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] return self._exit_event.wait() [ 550.502950] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 550.502950] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] result = hub.switch() [ 550.502950] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
550.502950] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] return self.greenlet.switch() [ 550.503373] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 550.503373] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] result = function(*args, **kwargs) [ 550.503373] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 550.503373] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] return func(*args, **kwargs) [ 550.503373] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 550.503373] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] raise e [ 550.503373] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 550.503373] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] nwinfo = self.network_api.allocate_for_instance( [ 550.503373] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 550.503373] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] created_port_ids = self._update_ports_for_instance( [ 550.503373] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 550.503373] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] with excutils.save_and_reraise_exception(): [ 550.503373] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 550.503816] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] self.force_reraise() [ 550.503816] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 550.503816] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] raise self.value [ 550.503816] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 550.503816] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] updated_port = self._update_port( [ 550.503816] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 550.503816] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] _ensure_no_port_binding_failure(port) [ 550.503816] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 550.503816] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] raise exception.PortBindingFailed(port_id=port['id']) [ 550.503816] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] nova.exception.PortBindingFailed: Binding failed for port 34081212-a7c2-42e3-b868-a211575a9557, please check neutron logs for more information. [ 550.503816] env[62133]: ERROR nova.compute.manager [instance: f8330803-064a-4f31-b769-19b0ce281e07] [ 550.504530] env[62133]: DEBUG nova.compute.utils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Binding failed for port 34081212-a7c2-42e3-b868-a211575a9557, please check neutron logs for more information. {{(pid=62133) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 550.507340] env[62133]: DEBUG oslo_concurrency.lockutils [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.507s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 550.509442] env[62133]: INFO nova.compute.claims [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 550.519916] env[62133]: DEBUG nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Build of instance f8330803-064a-4f31-b769-19b0ce281e07 was re-scheduled: Binding failed for port 34081212-a7c2-42e3-b868-a211575a9557, please check neutron logs for more information. 
{{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 550.519916] env[62133]: DEBUG nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Unplugging VIFs for instance {{(pid=62133) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 550.519916] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Acquiring lock "refresh_cache-f8330803-064a-4f31-b769-19b0ce281e07" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 550.519916] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Acquired lock "refresh_cache-f8330803-064a-4f31-b769-19b0ce281e07" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 550.520219] env[62133]: DEBUG nova.network.neutron [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 550.539713] env[62133]: DEBUG oslo_concurrency.lockutils [None req-569bd7d0-9a0f-4b5e-aaf2-7b1c2cc9e6c4 tempest-ImagesOneServerTestJSON-1256341287 tempest-ImagesOneServerTestJSON-1256341287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.613154] env[62133]: INFO nova.scheduler.client.report [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Deleted allocations for instance bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b [ 550.725073] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835152, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 550.890842] env[62133]: DEBUG oslo_concurrency.lockutils [None req-19c3384f-96c6-477d-a2a9-9766a1475e40 tempest-ServersAdminTestJSON-1897932561 tempest-ServersAdminTestJSON-1897932561-project-member] Acquiring lock "08ae027b-64b5-4d9d-a478-5d052d2cabf6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.891096] env[62133]: DEBUG oslo_concurrency.lockutils [None req-19c3384f-96c6-477d-a2a9-9766a1475e40 tempest-ServersAdminTestJSON-1897932561 tempest-ServersAdminTestJSON-1897932561-project-member] Lock "08ae027b-64b5-4d9d-a478-5d052d2cabf6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.044987] env[62133]: DEBUG nova.network.neutron [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.122437] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f0f3ee88-3341-431a-8b92-56a20474e344 tempest-AttachInterfacesV270Test-1828955100 tempest-AttachInterfacesV270Test-1828955100-project-member] Lock "bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.500s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.123933] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Lock "bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 18.639s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.124150] env[62133]: INFO nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 551.124360] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Lock "bc29ceaa-1fe9-492c-9c0a-1fbfc42f843b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.204128] env[62133]: DEBUG nova.network.neutron [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.225844] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835152, 'name': ReconfigVM_Task, 'duration_secs': 0.806112} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.226127] env[62133]: DEBUG nova.virt.vmwareapi.volumeops [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Reconfigured VM instance instance-00000001 to attach disk [datastore2] 6174840e-10a6-43ed-be9c-76602f91bf42/6174840e-10a6-43ed-be9c-76602f91bf42.vmdk or device None with type sparse {{(pid=62133) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 551.227469] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2035da3-1b85-4412-9641-e82ec84ae6ce {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.238294] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Waiting for the task: (returnval){ [ 551.238294] env[62133]: value = "task-835153" [ 551.238294] env[62133]: _type = "Task" [ 551.238294] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.248489] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835153, 'name': Rename_Task} progress is 5%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.629792] env[62133]: DEBUG nova.compute.manager [None req-efe99f13-fc44-4e4e-b991-ddeac4493bc3 tempest-ServersTestMultiNic-119293823 tempest-ServersTestMultiNic-119293823-project-member] [instance: bebe84f3-f56d-4a81-9001-a041df442b56] Starting instance... 
{{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 551.709779] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Releasing lock "refresh_cache-f8330803-064a-4f31-b769-19b0ce281e07" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.710338] env[62133]: DEBUG nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62133) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 551.710338] env[62133]: DEBUG nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 551.710430] env[62133]: DEBUG nova.network.neutron [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 551.734627] env[62133]: DEBUG nova.network.neutron [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 551.752046] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835153, 'name': Rename_Task, 'duration_secs': 0.209899} completed successfully. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 551.752611] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Powering on the VM {{(pid=62133) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 551.752899] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c3d39b0-c5e3-4867-8412-0c805818cf59 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.762581] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Waiting for the task: (returnval){ [ 551.762581] env[62133]: value = "task-835154" [ 551.762581] env[62133]: _type = "Task" [ 551.762581] env[62133]: } to complete. 
{{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 551.772035] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835154, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 551.885465] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9014ee4b-2895-42f7-a7bb-3a431b2cdb3c {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.895494] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f11bb8-a57d-4921-97ba-90082a1cad6e {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.929310] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e98eaf-57a1-4b0c-b6c8-16e26c28c86e {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.934360] env[62133]: DEBUG nova.compute.manager [req-dd98cf1b-3c1a-4e8c-ae3d-673848b21f6f req-38be7524-4128-4ea3-9a7d-463ebc4ce619 service nova] [instance: 584b1af2-6903-4a08-8ed2-9cd21075f0de] Received event network-vif-deleted-f99571ee-62f3-44d4-9b3f-40a5e71946e4 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 551.941701] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e778e8ee-0eea-4c06-a5e9-aed3417efdc7 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.963760] env[62133]: DEBUG nova.compute.provider_tree [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 552.177548] env[62133]: DEBUG oslo_concurrency.lockutils [None req-efe99f13-fc44-4e4e-b991-ddeac4493bc3 tempest-ServersTestMultiNic-119293823 tempest-ServersTestMultiNic-119293823-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.236967] env[62133]: DEBUG nova.network.neutron [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 552.273574] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd 
tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835154, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.494282] env[62133]: ERROR nova.scheduler.client.report [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [req-e8e57acf-0816-481e-ac23-b2bc699ee3ee] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2bb88767-ae4a-4310-888a-696a7895e29b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e8e57acf-0816-481e-ac23-b2bc699ee3ee"}]} [ 552.517852] env[62133]: DEBUG nova.scheduler.client.report [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Refreshing inventories for resource provider 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 552.544811] env[62133]: DEBUG nova.scheduler.client.report [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Updating ProviderTree inventory for provider 2bb88767-ae4a-4310-888a-696a7895e29b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 552.544952] env[62133]: DEBUG nova.compute.provider_tree [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 552.561685] env[62133]: DEBUG nova.scheduler.client.report [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Refreshing aggregate associations for resource provider 2bb88767-ae4a-4310-888a-696a7895e29b, aggregates: None {{(pid=62133) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:827}} [ 552.595169] env[62133]: DEBUG nova.scheduler.client.report [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Refreshing trait associations for resource provider 2bb88767-ae4a-4310-888a-696a7895e29b, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62133) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 552.740270] env[62133]: INFO nova.compute.manager [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] [instance: f8330803-064a-4f31-b769-19b0ce281e07] Took 1.03 seconds to deallocate network for instance. [ 552.780566] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835154, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 552.865171] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc12cbc-8693-4b14-8753-4bf9d98662df {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.872443] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-409395af-4f6e-4fba-961a-d2abe049df22 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.902082] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e817db9e-9ec5-40ac-b8d6-b71b2e7eee03 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.909996] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8a94db-b6b2-44ef-be8c-69e63e4b4aa8 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.924944] env[62133]: DEBUG nova.compute.provider_tree [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 553.275827] env[62133]: DEBUG oslo_vmware.api [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Task: {'id': task-835154, 'name': PowerOnVM_Task, 'duration_secs': 1.092751} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 553.276767] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Powered on the VM {{(pid=62133) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 553.276767] env[62133]: DEBUG nova.compute.manager [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Checking state {{(pid=62133) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 553.277867] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9639c12-a523-4c18-8d48-cb61ca6fa357 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.445532] env[62133]: ERROR nova.scheduler.client.report [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [req-1d4d91c1-4525-4266-bec0-d5eac03b4c1a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 2bb88767-ae4a-4310-888a-696a7895e29b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1d4d91c1-4525-4266-bec0-d5eac03b4c1a"}]} [ 553.463056] env[62133]: DEBUG nova.scheduler.client.report [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Refreshing inventories for resource provider 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 553.476054] env[62133]: DEBUG nova.scheduler.client.report [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Updating ProviderTree inventory for provider 2bb88767-ae4a-4310-888a-696a7895e29b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 553.476280] env[62133]: DEBUG nova.compute.provider_tree [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 61, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 553.487594] env[62133]: DEBUG nova.scheduler.client.report [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Refreshing aggregate associations for resource provider 2bb88767-ae4a-4310-888a-696a7895e29b, aggregates: None {{(pid=62133) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 553.512629] env[62133]: DEBUG nova.scheduler.client.report [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Refreshing trait associations for resource provider 2bb88767-ae4a-4310-888a-696a7895e29b, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=62133) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 553.689075] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8532abd5-0b16-4a6e-ab88-8266d2136693 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.697432] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ac9c00-1d5e-497f-9f56-cdbdbe49046b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.726160] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1825f9-0d96-4915-a1d5-aff80e78efdb {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.733163] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0773a89-e73c-4a39-aaf6-b5a7edb448f2 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.746109] env[62133]: DEBUG nova.compute.provider_tree [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 554.302021] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f79850aa-67ac-40b9-b0c3-f26728b440cd tempest-ServersAdmin275Test-1588497836 tempest-ServersAdmin275Test-1588497836-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62133) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.315471] env[62133]: DEBUG nova.scheduler.client.report [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Updated inventory for provider 2bb88767-ae4a-4310-888a-696a7895e29b with generation 30 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 554.315727] env[62133]: DEBUG nova.compute.provider_tree [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Updating resource provider 2bb88767-ae4a-4310-888a-696a7895e29b generation from 30 to 31 during operation: update_inventory {{(pid=62133) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 554.315902] env[62133]: DEBUG nova.compute.provider_tree [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Updating inventory in ProviderTree for provider 2bb88767-ae4a-4310-888a-696a7895e29b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 554.702518] env[62133]: INFO nova.scheduler.client.report [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Deleted allocations for instance f8330803-064a-4f31-b769-19b0ce281e07 [ 554.713711] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f9a4f701-fadd-484d-b2d6-e0dfc10bb65c tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Acquiring lock "78cccd5a-2bfa-4daf-af4a-c339d03cd140" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.713914] env[62133]: DEBUG oslo_concurrency.lockutils [None req-f9a4f701-fadd-484d-b2d6-e0dfc10bb65c tempest-MigrationsAdminTest-1737530868 tempest-MigrationsAdminTest-1737530868-project-member] Lock "78cccd5a-2bfa-4daf-af4a-c339d03cd140" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.822772] env[62133]: DEBUG oslo_concurrency.lockutils [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.315s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 554.823104] env[62133]: DEBUG nova.compute.manager [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Start building networks asynchronously for instance. {{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 554.825760] env[62133]: DEBUG oslo_concurrency.lockutils [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.609s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.827170] env[62133]: INFO nova.compute.claims [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] [instance: 0773334e-8541-439e-aa0c-83dacac16242] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 555.215565] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b26fbb94-c938-4039-803f-24c4a3489a93 tempest-ServerExternalEventsTest-698468913 tempest-ServerExternalEventsTest-698468913-project-member] Lock "f8330803-064a-4f31-b769-19b0ce281e07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.768s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.215565] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Lock "f8330803-064a-4f31-b769-19b0ce281e07" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 22.729s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.215565] env[62133]: INFO nova.compute.manager [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] [instance: f8330803-064a-4f31-b769-19b0ce281e07] During sync_power_state the instance has a pending task (spawning). Skip. [ 555.215565] env[62133]: DEBUG oslo_concurrency.lockutils [None req-07c0cbd6-dd24-4d67-8f94-f3ea7710c099 None None] Lock "f8330803-064a-4f31-b769-19b0ce281e07" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 555.331985] env[62133]: DEBUG nova.compute.utils [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Using /dev/sd instead of None {{(pid=62133) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 555.338997] env[62133]: DEBUG nova.compute.manager [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Allocating IP information in the background. 
{{(pid=62133) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 555.338997] env[62133]: DEBUG nova.network.neutron [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] allocate_for_instance() {{(pid=62133) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 555.392814] env[62133]: DEBUG nova.policy [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82faaf470f8f45e0b5360f38f7a1fbda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13f11c32eda647fb82b5cff79c7443be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62133) authorize /opt/stack/nova/nova/policy.py:203}} [ 555.718602] env[62133]: DEBUG nova.compute.manager [None req-95dece08-0425-4157-b674-9055122a70d5 tempest-ServersAdminTestJSON-1897932561 tempest-ServersAdminTestJSON-1897932561-project-member] [instance: d5180e57-746a-4c5b-a00a-438eaa7ecf7e] Starting instance... {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 555.845780] env[62133]: DEBUG nova.compute.manager [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Start building block device mappings for instance. 
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 555.961032] env[62133]: DEBUG nova.network.neutron [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Successfully created port: e77577c1-2553-4763-93f4-2723d291e3d6 {{(pid=62133) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 556.065165] env[62133]: DEBUG oslo_concurrency.lockutils [None req-189cae53-36f7-4574-b280-02bf1ce359d2 tempest-VolumesAdminNegativeTest-336984350 tempest-VolumesAdminNegativeTest-336984350-project-member] Acquiring lock "bf6a17ce-a959-4ee4-9ecd-2e299f11048b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.065468] env[62133]: DEBUG oslo_concurrency.lockutils [None req-189cae53-36f7-4574-b280-02bf1ce359d2 tempest-VolumesAdminNegativeTest-336984350 tempest-VolumesAdminNegativeTest-336984350-project-member] Lock "bf6a17ce-a959-4ee4-9ecd-2e299f11048b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.175602] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d314bf9a-bf07-4cae-b5d0-a0bd5ba1a973 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.183526] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fae6ea-c03e-47ab-b48b-6033b5ac17d2 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.216390] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baeb82bc-000c-45ed-b954-b3be8d9e4f4f {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.223063] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-402a4bf0-3aff-4a54-bfa2-fdc186777782 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.244026] env[62133]: DEBUG nova.compute.provider_tree [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 556.258470] env[62133]: DEBUG oslo_concurrency.lockutils [None req-95dece08-0425-4157-b674-9055122a70d5 tempest-ServersAdminTestJSON-1897932561 tempest-ServersAdminTestJSON-1897932561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 556.749317] env[62133]: DEBUG nova.scheduler.client.report [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Inventory has not changed for 
provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 556.859940] env[62133]: DEBUG nova.compute.manager [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Start spawning the instance on the hypervisor. {{(pid=62133) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 556.938758] env[62133]: DEBUG nova.virt.hardware [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 556.938993] env[62133]: DEBUG nova.virt.hardware [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 556.939166] env[62133]: DEBUG nova.virt.hardware [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 556.939394] env[62133]: DEBUG nova.virt.hardware [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 556.939496] env[62133]: DEBUG nova.virt.hardware [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 556.939770] env[62133]: DEBUG nova.virt.hardware [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 556.940137] env[62133]: DEBUG nova.virt.hardware [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 556.940347] env[62133]: DEBUG nova.virt.hardware [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 556.940565] env[62133]: DEBUG nova.virt.hardware [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 556.941126] env[62133]: DEBUG nova.virt.hardware [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 556.941126] env[62133]: DEBUG nova.virt.hardware [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 556.941987] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b84069-3eee-452a-a819-b7cd9095425d {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.951311] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b372a90d-3a68-466f-ac8e-2bece20f7eb9 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.258734] env[62133]: DEBUG oslo_concurrency.lockutils [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.258734] env[62133]: DEBUG nova.compute.manager [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] [instance: 0773334e-8541-439e-aa0c-83dacac16242] Start building networks asynchronously for instance. 
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 557.262332] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.442s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.645075] env[62133]: DEBUG oslo_concurrency.lockutils [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquiring lock "6174840e-10a6-43ed-be9c-76602f91bf42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.645075] env[62133]: DEBUG oslo_concurrency.lockutils [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Lock "6174840e-10a6-43ed-be9c-76602f91bf42" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.645075] env[62133]: DEBUG oslo_concurrency.lockutils [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquiring lock "6174840e-10a6-43ed-be9c-76602f91bf42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.645075] env[62133]: DEBUG oslo_concurrency.lockutils [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Lock "6174840e-10a6-43ed-be9c-76602f91bf42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.645522] env[62133]: DEBUG oslo_concurrency.lockutils [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Lock "6174840e-10a6-43ed-be9c-76602f91bf42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.647863] env[62133]: INFO nova.compute.manager [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Terminating instance [ 557.650371] env[62133]: DEBUG oslo_concurrency.lockutils [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquiring lock "refresh_cache-6174840e-10a6-43ed-be9c-76602f91bf42" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.651500] env[62133]: DEBUG oslo_concurrency.lockutils [None req-77eaf446-ee68-4d44-915a-751275788f25 
tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Acquired lock "refresh_cache-6174840e-10a6-43ed-be9c-76602f91bf42" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.651500] env[62133]: DEBUG nova.network.neutron [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 557.767195] env[62133]: DEBUG nova.compute.utils [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Using /dev/sd instead of None {{(pid=62133) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 557.771625] env[62133]: DEBUG nova.compute.manager [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] [instance: 0773334e-8541-439e-aa0c-83dacac16242] Allocating IP information in the background. {{(pid=62133) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 557.771804] env[62133]: DEBUG nova.network.neutron [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] [instance: 0773334e-8541-439e-aa0c-83dacac16242] allocate_for_instance() {{(pid=62133) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 557.820970] env[62133]: DEBUG nova.policy [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fafd478340d1494e9e3e49c24cb1b124', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '190596354b3941f4911b5209df766130', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62133) authorize /opt/stack/nova/nova/policy.py:203}} [ 557.941223] env[62133]: DEBUG oslo_concurrency.lockutils [None req-543dc76c-539e-4737-bbfb-e08a9b206be1 tempest-ListImageFiltersTestJSON-1979703624 tempest-ListImageFiltersTestJSON-1979703624-project-member] Acquiring lock "3257f4ed-d1d5-4c41-aa2a-1732772ec628" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.941627] env[62133]: DEBUG oslo_concurrency.lockutils [None req-543dc76c-539e-4737-bbfb-e08a9b206be1 tempest-ListImageFiltersTestJSON-1979703624 tempest-ListImageFiltersTestJSON-1979703624-project-member] Lock "3257f4ed-d1d5-4c41-aa2a-1732772ec628" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 558.151512] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cdcf952-4b82-4bef-b90e-aa3d16636d4f {{(pid=62133) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.170761] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fa2067-5174-43df-a4ce-86c1c75930f5 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.209829] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8b659c-051c-402e-8916-9d4ebd48f74f {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.210693] env[62133]: DEBUG nova.network.neutron [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.221053] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d903607-4744-4983-b655-ce7257c92981 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.248671] env[62133]: DEBUG nova.compute.provider_tree [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Inventory has not changed in ProviderTree for provider: 2bb88767-ae4a-4310-888a-696a7895e29b {{(pid=62133) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 558.276850] env[62133]: DEBUG nova.compute.manager [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] [instance: 0773334e-8541-439e-aa0c-83dacac16242] Start building block device mappings for instance. 
{{(pid=62133) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 558.410637] env[62133]: DEBUG nova.network.neutron [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.647405] env[62133]: DEBUG nova.network.neutron [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] [instance: 0773334e-8541-439e-aa0c-83dacac16242] Successfully created port: b1269bc2-98dd-4193-9375-594a4f6e30fe {{(pid=62133) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 558.754069] env[62133]: DEBUG nova.scheduler.client.report [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Inventory has not changed for provider 2bb88767-ae4a-4310-888a-696a7895e29b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 60, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62133) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 558.826212] env[62133]: ERROR nova.compute.manager [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e77577c1-2553-4763-93f4-2723d291e3d6, please check neutron logs for more information. 
[ 558.826212] env[62133]: ERROR nova.compute.manager Traceback (most recent call last): [ 558.826212] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.826212] env[62133]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 558.826212] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 558.826212] env[62133]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 558.826212] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 558.826212] env[62133]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 558.826212] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.826212] env[62133]: ERROR nova.compute.manager self.force_reraise() [ 558.826212] env[62133]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.826212] env[62133]: ERROR nova.compute.manager raise self.value [ 558.826212] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 558.826212] env[62133]: ERROR nova.compute.manager updated_port = self._update_port( [ 558.826212] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.826212] env[62133]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 558.827033] env[62133]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 558.827033] env[62133]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 558.827033] env[62133]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e77577c1-2553-4763-93f4-2723d291e3d6, please check neutron logs for more information. 
[ 558.827033] env[62133]: ERROR nova.compute.manager [ 558.827033] env[62133]: Traceback (most recent call last): [ 558.827033] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 558.827033] env[62133]: listener.cb(fileno) [ 558.827033] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 558.827033] env[62133]: result = function(*args, **kwargs) [ 558.827033] env[62133]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 558.827033] env[62133]: return func(*args, **kwargs) [ 558.827033] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 558.827033] env[62133]: raise e [ 558.827033] env[62133]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.827033] env[62133]: nwinfo = self.network_api.allocate_for_instance( [ 558.827033] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 558.827033] env[62133]: created_port_ids = self._update_ports_for_instance( [ 558.827033] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 558.827033] env[62133]: with excutils.save_and_reraise_exception(): [ 558.827033] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.827033] env[62133]: self.force_reraise() [ 558.827033] env[62133]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.827033] env[62133]: raise self.value [ 558.827033] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 558.827033] env[62133]: updated_port = self._update_port( [ 558.827033] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.827033] env[62133]: _ensure_no_port_binding_failure(port) [ 558.827033] env[62133]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 558.827033] env[62133]: raise exception.PortBindingFailed(port_id=port['id']) [ 558.827979] env[62133]: nova.exception.PortBindingFailed: Binding failed for port e77577c1-2553-4763-93f4-2723d291e3d6, please check neutron logs for more information. [ 558.827979] env[62133]: Removing descriptor: 16 [ 558.827979] env[62133]: ERROR nova.compute.manager [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e77577c1-2553-4763-93f4-2723d291e3d6, please check neutron logs for more information. 
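Both tracebacks above end in _ensure_no_port_binding_failure, which is where Nova converts a failed Neutron port binding into PortBindingFailed: after updating the port, Nova looks at the binding result Neutron reports on the port and raises if the binding did not succeed, so the underlying cause for port e77577c1-2553-4763-93f4-2723d291e3d6 has to be found in the neutron-server logs. The following is a simplified sketch of that check, not the exact Nova source; it assumes `port` is a port dict as returned by the Neutron API, where a failed binding is reported as binding:vif_type == 'binding_failed'.

    # Simplified sketch of the check behind the PortBindingFailed errors above.
    # Assumption: `port` is a Neutron API port dict; a failed binding is
    # signalled by binding:vif_type == 'binding_failed'.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reports that binding this port failed."""
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example: a port whose binding failed on the Neutron side, producing the
    # same message that appears in the log records above.
    try:
        ensure_no_port_binding_failure({
            'id': 'e77577c1-2553-4763-93f4-2723d291e3d6',
            'binding:vif_type': 'binding_failed',
        })
    except PortBindingFailed as exc:
        print(exc)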
[ 558.827979] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Traceback (most recent call last): [ 558.827979] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 558.827979] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] yield resources [ 558.827979] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 558.827979] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] self.driver.spawn(context, instance, image_meta, [ 558.827979] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 558.827979] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 558.827979] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 558.827979] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] vm_ref = self.build_virtual_machine(instance, [ 558.828473] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 558.828473] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] vif_infos = vmwarevif.get_vif_info(self._session, [ 558.828473] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 558.828473] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] for vif in network_info: [ 558.828473] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 558.828473] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] return self._sync_wrapper(fn, *args, **kwargs) [ 558.828473] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 558.828473] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] self.wait() [ 558.828473] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 558.828473] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] self[:] = self._gt.wait() [ 558.828473] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 558.828473] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] return self._exit_event.wait() [ 558.828473] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 558.828882] env[62133]: ERROR 
nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] result = hub.switch() [ 558.828882] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 558.828882] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] return self.greenlet.switch() [ 558.828882] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 558.828882] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] result = function(*args, **kwargs) [ 558.828882] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 558.828882] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] return func(*args, **kwargs) [ 558.828882] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 558.828882] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] raise e [ 558.828882] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 558.828882] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] nwinfo = self.network_api.allocate_for_instance( [ 558.828882] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 558.828882] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] created_port_ids = self._update_ports_for_instance( [ 558.829274] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 558.829274] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] with excutils.save_and_reraise_exception(): [ 558.829274] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 558.829274] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] self.force_reraise() [ 558.829274] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 558.829274] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] raise self.value [ 558.829274] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 558.829274] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] updated_port = self._update_port( [ 558.829274] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 558.829274] 
env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] _ensure_no_port_binding_failure(port) [ 558.829274] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 558.829274] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] raise exception.PortBindingFailed(port_id=port['id']) [ 558.831234] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] nova.exception.PortBindingFailed: Binding failed for port e77577c1-2553-4763-93f4-2723d291e3d6, please check neutron logs for more information. [ 558.831234] env[62133]: ERROR nova.compute.manager [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] [ 558.831234] env[62133]: INFO nova.compute.manager [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Terminating instance [ 558.831234] env[62133]: DEBUG oslo_concurrency.lockutils [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Acquiring lock "refresh_cache-2fbc4d53-a000-4f14-a4b7-087aee2129bc" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 558.831234] env[62133]: DEBUG oslo_concurrency.lockutils [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Acquired lock "refresh_cache-2fbc4d53-a000-4f14-a4b7-087aee2129bc" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 558.831234] env[62133]: DEBUG nova.network.neutron [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 558.915878] env[62133]: DEBUG oslo_concurrency.lockutils [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Releasing lock "refresh_cache-6174840e-10a6-43ed-be9c-76602f91bf42" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.916248] env[62133]: DEBUG nova.compute.manager [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Start destroying the instance on the hypervisor. 
{{(pid=62133) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 558.916489] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Destroying instance {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 558.918761] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9d8daa-031c-4170-8d97-d82c8f99c476 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.930298] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Powering off the VM {{(pid=62133) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 558.930298] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-93111900-c3c3-4982-9914-d92a3a872156 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.937756] env[62133]: DEBUG oslo_vmware.api [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 558.937756] env[62133]: value = "task-835155" [ 558.937756] env[62133]: _type = "Task" [ 558.937756] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 558.948113] env[62133]: DEBUG oslo_vmware.api [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835155, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.267577] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.006s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.268294] env[62133]: ERROR nova.compute.manager [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port aa3074f9-6c4f-46e2-8201-f9f0bdff1699, please check neutron logs for more information. 
[ 559.268294] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Traceback (most recent call last): [ 559.268294] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 559.268294] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] self.driver.spawn(context, instance, image_meta, [ 559.268294] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 559.268294] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 559.268294] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 559.268294] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] vm_ref = self.build_virtual_machine(instance, [ 559.268294] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 559.268294] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] vif_infos = vmwarevif.get_vif_info(self._session, [ 559.268294] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 559.268725] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] for vif in network_info: [ 559.268725] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 559.268725] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] return self._sync_wrapper(fn, *args, **kwargs) [ 559.268725] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 559.268725] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] self.wait() [ 559.268725] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 559.268725] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] self[:] = self._gt.wait() [ 559.268725] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 559.268725] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] return self._exit_event.wait() [ 559.268725] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 559.268725] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] result = hub.switch() [ 559.268725] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
559.268725] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] return self.greenlet.switch() [ 559.269140] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 559.269140] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] result = function(*args, **kwargs) [ 559.269140] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 559.269140] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] return func(*args, **kwargs) [ 559.269140] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 559.269140] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] raise e [ 559.269140] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 559.269140] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] nwinfo = self.network_api.allocate_for_instance( [ 559.269140] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 559.269140] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] created_port_ids = self._update_ports_for_instance( [ 559.269140] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 559.269140] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] with excutils.save_and_reraise_exception(): [ 559.269140] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 559.269531] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] self.force_reraise() [ 559.269531] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 559.269531] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] raise self.value [ 559.269531] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 559.269531] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] updated_port = self._update_port( [ 559.269531] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 559.269531] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] _ensure_no_port_binding_failure(port) [ 559.269531] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 559.269531] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] raise exception.PortBindingFailed(port_id=port['id']) [ 559.269531] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] nova.exception.PortBindingFailed: Binding failed for port aa3074f9-6c4f-46e2-8201-f9f0bdff1699, please check neutron logs for more information. [ 559.269531] env[62133]: ERROR nova.compute.manager [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] [ 559.269854] env[62133]: DEBUG nova.compute.utils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Binding failed for port aa3074f9-6c4f-46e2-8201-f9f0bdff1699, please check neutron logs for more information. {{(pid=62133) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 559.273091] env[62133]: DEBUG nova.compute.manager [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Build of instance f774f4eb-7329-4449-8567-b4c0dc8fd78a was re-scheduled: Binding failed for port aa3074f9-6c4f-46e2-8201-f9f0bdff1699, please check neutron logs for more information. {{(pid=62133) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 559.273773] env[62133]: DEBUG nova.compute.manager [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Unplugging VIFs for instance {{(pid=62133) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 559.273973] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Acquiring lock "refresh_cache-f774f4eb-7329-4449-8567-b4c0dc8fd78a" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.274143] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Acquired lock "refresh_cache-f774f4eb-7329-4449-8567-b4c0dc8fd78a" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.274411] env[62133]: DEBUG nova.network.neutron [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Building network info cache for instance {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 559.275649] env[62133]: DEBUG oslo_concurrency.lockutils [None req-b0ab7f0a-9616-4d7a-ba1f-d1c49041e791 tempest-InstanceActionsTestJSON-1669720906 tempest-InstanceActionsTestJSON-1669720906-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.430s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.277398] env[62133]: INFO nova.compute.claims 
[None req-b0ab7f0a-9616-4d7a-ba1f-d1c49041e791 tempest-InstanceActionsTestJSON-1669720906 tempest-InstanceActionsTestJSON-1669720906-project-member] [instance: f62a3436-a4a7-42a2-8023-913319df1d47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 559.293551] env[62133]: DEBUG nova.compute.manager [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] [instance: 0773334e-8541-439e-aa0c-83dacac16242] Start spawning the instance on the hypervisor. {{(pid=62133) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 559.350464] env[62133]: DEBUG nova.virt.hardware [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-09-11T19:10:12Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-09-11T19:09:55Z,direct_url=,disk_format='vmdk',id=0d8cd420-ac77-4102-b0d0-bf339970e2ce,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='4081a99a6d47439bb62d019175c7a8ec',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-09-11T19:09:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 559.350871] env[62133]: DEBUG nova.virt.hardware [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Flavor limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 559.350968] env[62133]: DEBUG nova.virt.hardware [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Image limits 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 559.351260] env[62133]: DEBUG nova.virt.hardware [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Flavor pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 559.351441] env[62133]: DEBUG nova.virt.hardware [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Image pref 0:0:0 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 559.351624] env[62133]: DEBUG nova.virt.hardware [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62133) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 559.351817] env[62133]: DEBUG nova.virt.hardware [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 559.351980] env[62133]: DEBUG nova.virt.hardware [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 559.352167] env[62133]: DEBUG nova.virt.hardware [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Got 1 possible topologies {{(pid=62133) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 559.352331] env[62133]: DEBUG nova.virt.hardware [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 559.352503] env[62133]: DEBUG nova.virt.hardware [None req-baa667b9-0669-4ea8-8b7a-d93938f24538 tempest-SecurityGroupsTestJSON-424398262 tempest-SecurityGroupsTestJSON-424398262-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62133) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 559.353586] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f6ae0d-96d5-4c1a-aed5-ad90aeca1d7b {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.357773] env[62133]: DEBUG nova.network.neutron [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 559.365931] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73085fd9-9329-45f0-8b99-8ec28cf12bb1 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.454225] env[62133]: DEBUG oslo_vmware.api [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835155, 'name': PowerOffVM_Task, 'duration_secs': 0.144526} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 559.454541] env[62133]: DEBUG nova.virt.vmwareapi.vm_util [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Powered off the VM {{(pid=62133) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 559.454723] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Unregistering the VM {{(pid=62133) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 559.455015] env[62133]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa4746c6-f086-43e1-bb47-52f754928f42 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.484985] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Unregistered the VM {{(pid=62133) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 559.484985] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Deleting contents of the VM from datastore datastore2 {{(pid=62133) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 559.485174] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Deleting the datastore file [datastore2] 6174840e-10a6-43ed-be9c-76602f91bf42 {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 559.485549] env[62133]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e8bbc04-8372-4b31-a788-241007bd4905 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 559.493549] env[62133]: DEBUG oslo_vmware.api [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for the task: (returnval){ [ 559.493549] env[62133]: value = "task-835157" [ 559.493549] env[62133]: _type = "Task" [ 559.493549] env[62133]: } to complete. {{(pid=62133) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 559.509243] env[62133]: DEBUG oslo_vmware.api [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835157, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 559.524205] env[62133]: DEBUG nova.network.neutron [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.605408] env[62133]: DEBUG oslo_concurrency.lockutils [None req-7f22a2e8-c663-4beb-8c48-c25c901fbe96 tempest-ListImageFiltersTestJSON-1979703624 tempest-ListImageFiltersTestJSON-1979703624-project-member] Acquiring lock "abbebf20-65d6-44b9-90c9-a9485f042b55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.605408] env[62133]: DEBUG oslo_concurrency.lockutils [None req-7f22a2e8-c663-4beb-8c48-c25c901fbe96 tempest-ListImageFiltersTestJSON-1979703624 tempest-ListImageFiltersTestJSON-1979703624-project-member] Lock "abbebf20-65d6-44b9-90c9-a9485f042b55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62133) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.807699] env[62133]: DEBUG nova.network.neutron [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 559.932452] env[62133]: DEBUG nova.network.neutron [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] [instance: f774f4eb-7329-4449-8567-b4c0dc8fd78a] Updating instance_info_cache with network_info: [] {{(pid=62133) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.004778] env[62133]: DEBUG oslo_vmware.api [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Task: {'id': task-835157, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189498} completed successfully. 
{{(pid=62133) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 560.005163] env[62133]: DEBUG nova.virt.vmwareapi.ds_util [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Deleted the datastore file {{(pid=62133) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 560.005288] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Deleted contents of the VM from datastore datastore2 {{(pid=62133) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 560.005465] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Instance destroyed {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 560.005855] env[62133]: INFO nova.compute.manager [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Took 1.09 seconds to destroy the instance on the hypervisor. [ 560.006140] env[62133]: DEBUG oslo.service.loopingcall [None req-77eaf446-ee68-4d44-915a-751275788f25 tempest-ServersAdmin275Test-1220984892 tempest-ServersAdmin275Test-1220984892-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 560.007028] env[62133]: DEBUG nova.compute.manager [-] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 560.007028] env[62133]: DEBUG nova.network.neutron [-] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 560.030934] env[62133]: DEBUG oslo_concurrency.lockutils [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Releasing lock "refresh_cache-2fbc4d53-a000-4f14-a4b7-087aee2129bc" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 560.031504] env[62133]: DEBUG nova.compute.manager [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Start destroying the instance on the hypervisor. 
{{(pid=62133) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 560.031667] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Destroying instance {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 560.032686] env[62133]: DEBUG nova.network.neutron [-] [instance: 6174840e-10a6-43ed-be9c-76602f91bf42] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 560.033863] env[62133]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e00ffa64-ccd5-4113-8609-94ad2b4ff347 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.047838] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2577062e-5154-4c0b-8f4a-ab333d818023 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.066991] env[62133]: DEBUG nova.compute.manager [None req-aa5c352f-d690-4e26-95b9-1c0de0178186 tempest-ServerDiagnosticsV248Test-1763132665 tempest-ServerDiagnosticsV248Test-1763132665-project-admin] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Checking state {{(pid=62133) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 560.070531] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2157c9d-2a1c-4e02-8cf2-052f6516b853 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.077476] env[62133]: INFO nova.compute.manager [None req-aa5c352f-d690-4e26-95b9-1c0de0178186 tempest-ServerDiagnosticsV248Test-1763132665 tempest-ServerDiagnosticsV248Test-1763132665-project-admin] [instance: 3410b0c4-da88-4146-bb50-9b5a9a5d0d87] Retrieving diagnostics [ 560.086797] env[62133]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a087251-58cc-49e4-966d-2feab3d1c085 {{(pid=62133) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.089454] env[62133]: WARNING nova.virt.vmwareapi.vmops [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2fbc4d53-a000-4f14-a4b7-087aee2129bc could not be found. [ 560.089715] env[62133]: DEBUG nova.virt.vmwareapi.vmops [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Instance destroyed {{(pid=62133) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 560.090790] env[62133]: INFO nova.compute.manager [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Took 0.06 seconds to destroy the instance on the hypervisor. 
[ 560.090790] env[62133]: DEBUG oslo.service.loopingcall [None req-d7f1d626-46fe-42ff-a0b2-57cc27cfb4fb tempest-VolumesAssistedSnapshotsTest-1706064471 tempest-VolumesAssistedSnapshotsTest-1706064471-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62133) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 560.090790] env[62133]: DEBUG nova.compute.manager [-] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Deallocating network for instance {{(pid=62133) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 560.090941] env[62133]: DEBUG nova.network.neutron [-] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] deallocate_for_instance() {{(pid=62133) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 560.122945] env[62133]: DEBUG nova.network.neutron [-] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Instance cache missing network info. {{(pid=62133) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 560.206472] env[62133]: DEBUG nova.compute.manager [req-bf7a654f-d8aa-44c7-9434-3e1c8e95cdcc req-6b41bf54-01e8-4e7e-a084-4f8e74de4849 service nova] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Received event network-changed-e77577c1-2553-4763-93f4-2723d291e3d6 {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11129}} [ 560.207032] env[62133]: DEBUG nova.compute.manager [req-bf7a654f-d8aa-44c7-9434-3e1c8e95cdcc req-6b41bf54-01e8-4e7e-a084-4f8e74de4849 service nova] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Refreshing instance network info cache due to event network-changed-e77577c1-2553-4763-93f4-2723d291e3d6. {{(pid=62133) external_instance_event /opt/stack/nova/nova/compute/manager.py:11134}} [ 560.207114] env[62133]: DEBUG oslo_concurrency.lockutils [req-bf7a654f-d8aa-44c7-9434-3e1c8e95cdcc req-6b41bf54-01e8-4e7e-a084-4f8e74de4849 service nova] Acquiring lock "refresh_cache-2fbc4d53-a000-4f14-a4b7-087aee2129bc" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.207243] env[62133]: DEBUG oslo_concurrency.lockutils [req-bf7a654f-d8aa-44c7-9434-3e1c8e95cdcc req-6b41bf54-01e8-4e7e-a084-4f8e74de4849 service nova] Acquired lock "refresh_cache-2fbc4d53-a000-4f14-a4b7-087aee2129bc" {{(pid=62133) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.207403] env[62133]: DEBUG nova.network.neutron [req-bf7a654f-d8aa-44c7-9434-3e1c8e95cdcc req-6b41bf54-01e8-4e7e-a084-4f8e74de4849 service nova] [instance: 2fbc4d53-a000-4f14-a4b7-087aee2129bc] Refreshing network info cache for port e77577c1-2553-4763-93f4-2723d291e3d6 {{(pid=62133) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 560.436389] env[62133]: DEBUG oslo_concurrency.lockutils [None req-5ce19bb4-41e7-43e7-895b-056fb54ba1b8 tempest-ServerDiagnosticsNegativeTest-411291900 tempest-ServerDiagnosticsNegativeTest-411291900-project-member] Releasing lock